movement audio system for client
All checks were successful
studiorailgun/Renderer/pipeline/head This commit looks good

This commit is contained in:
austin 2024-08-08 11:33:51 -04:00
parent 5e87dec05d
commit d7619692ed
46 changed files with 780 additions and 121 deletions

View File

@ -0,0 +1,87 @@
{
"ignoredVoxelTypes" : [
0
],
"defaultSurfaceAudio" : {
"voxelTypeIds" : [
1
],
"footstepRegularBareAudioPaths" : [
"Audio/movement/surface/dirt/Bare Step Gravel Medium A.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Medium B.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Medium C.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Medium D.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Medium E.wav"
],
"footstepHeavyBareAudioPaths" : [
"Audio/movement/surface/dirt/Bare Step Gravel Hard A.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Hard B.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Hard C.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Hard D.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Hard E.wav"
],
"footstepRegularShoeAudioPaths" : [
"Audio/movement/surface/dirt/Shoe Step Gravel Medium A.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Medium B.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Medium C.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Medium D.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Medium E.wav"
],
"footstepHeavyShoeAudioPaths" : [
"Audio/movement/surface/dirt/Shoe Step Gravel Hard A.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Hard B.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Hard C.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Hard D.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Hard E.wav"
],
"jumpAudioPaths" : [
"Audio/movement/surface/dirt/Jump Step Gravel A.wav",
"Audio/movement/surface/dirt/Jump Step Gravel B.wav"
],
"landAudioPaths" : [
"Audio/movement/surface/dirt/Land Step Gravel A.wav",
"Audio/movement/surface/dirt/Land Step Gravel B.wav"
]
},
"surfaceAudio" : [
{
"voxelTypeIds" : [1],
"footstepRegularBareAudioPaths" : [
"Audio/movement/surface/dirt/Bare Step Gravel Medium A.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Medium B.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Medium C.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Medium D.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Medium E.wav"
],
"footstepHeavyBareAudioPaths" : [
"Audio/movement/surface/dirt/Bare Step Gravel Hard A.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Hard B.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Hard C.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Hard D.wav",
"Audio/movement/surface/dirt/Bare Step Gravel Hard E.wav"
],
"footstepRegularShoeAudioPaths" : [
"Audio/movement/surface/dirt/Shoe Step Gravel Medium A.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Medium B.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Medium C.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Medium D.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Medium E.wav"
],
"footstepHeavyShoeAudioPaths" : [
"Audio/movement/surface/dirt/Shoe Step Gravel Hard A.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Hard B.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Hard C.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Hard D.wav",
"Audio/movement/surface/dirt/Shoe Step Gravel Hard E.wav"
],
"jumpAudioPaths" : [
"Audio/movement/surface/dirt/Jump Step Gravel A.wav",
"Audio/movement/surface/dirt/Jump Step Gravel B.wav"
],
"landAudioPaths" : [
"Audio/movement/surface/dirt/Land Step Gravel A.wav",
"Audio/movement/surface/dirt/Land Step Gravel B.wav"
]
}
]
}

View File

@ -131,6 +131,8 @@
"maxVelocity" : 500.5,
"strafeMultiplier" : 0.7,
"backpedalMultiplier" : 0.5,
"footstepFirstAudioOffset" : 0.2,
"footstepSecondAudioOffset" : 0.6,
"animationStartup" : {
"nameThirdPerson" : "Jog",
"priorityCategory" : "CORE_MOVEMENT"
@ -180,9 +182,6 @@
"nameThirdPerson" : "Land",
"nameFirstPerson" : "Land",
"priorityCategory" : "MOVEMENT_MODIFIER"
},
"audioData" : {
"audioPath" : "Audio/movement/landing-on-the-ground-4.wav"
}
}
}

View File

@ -1,3 +1,3 @@
#maven.buildNumber.plugin properties file
#Thu Aug 01 18:31:38 EDT 2024
buildNumber=199
#Thu Aug 08 09:35:08 EDT 2024
buildNumber=200

View File

@ -59,12 +59,16 @@ public class AudioBuffer {
//whether this buffer has created an al buffer object or not
boolean isBuffered = false;
//The filepath associated with this buffer
String filePath = null;
/**
* Creates the audio buffer object
* @param fileNameRaw The path for the audio file
*/
public AudioBuffer(String fileNameRaw) {
String fileNameSanitized = FileUtils.sanitizeFilePath(fileNameRaw);
this.filePath = fileNameSanitized;
bufferId = alGenBuffers();
//read vorbis
@ -275,6 +279,22 @@ public class AudioBuffer {
return bufferId;
}
/**
* Gets the file path of the buffer
* @return The file path
*/
public String getFilePath(){
return this.filePath;
}
/**
* Gets the length of this audio buffer
* @return The length
*/
public double getLength(){
return this.length;
}
/**
* Cleans up this audio buffer
*/

View File

@ -5,6 +5,10 @@ import electrosphere.entity.types.camera.CameraEntityUtils;
import electrosphere.logger.LoggerInterface;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioSystem;
@ -55,6 +59,9 @@ public class AudioEngine {
//if true, efx present and active
boolean hasEFX = false;
//The list of sources being tracked
private List<AudioSource> openALSources = new CopyOnWriteArrayList<AudioSource>();
/**
* Creates an audio engine
@ -180,6 +187,7 @@ public class AudioEngine {
*/
public void update(){
updateListener();
updateOpenALSources();
}
/**
@ -190,6 +198,38 @@ public class AudioEngine {
alcCloseDevice(device);
}
/**
* Registers an openal audio source with the engine
* @param source The audio source
*/
protected void registerSource(AudioSource source){
this.openALSources.add(source);
}
/**
* Updates the status of all tracked sources
*/
private void updateOpenALSources(){
List<AudioSource> toRemove = new LinkedList<AudioSource>();
for(AudioSource source : this.openALSources){
if(!source.isPlaying()){
toRemove.add(source);
}
}
for(AudioSource source : toRemove){
this.openALSources.remove(source);
source.cleanup();
}
}
/**
* Gets the list of openal sources
* @return The list of sources
*/
public List<AudioSource> getOpenALSources(){
return Collections.unmodifiableList(this.openALSources);
}
/**
* Sets the gain of the engine
* @param gain The gain value

View File

@ -20,14 +20,16 @@ public class AudioSource {
* @param loop if true, will loop audio, otherwise will not
* @param relative if true, will make the audio source position relative to the listener based on position
*/
protected AudioSource(boolean loop, boolean relative){
this.sourceId = AL10.alGenSources();
protected static AudioSource create(boolean loop, boolean relative){
AudioSource rVal = new AudioSource();
rVal.sourceId = AL10.alGenSources();
if (loop) {
AL10.alSourcei(sourceId, AL10.AL_LOOPING, AL10.AL_TRUE);
AL10.alSourcei(rVal.sourceId, AL10.AL_LOOPING, AL10.AL_TRUE);
}
if (relative) {
AL10.alSourcei(sourceId, AL10.AL_SOURCE_RELATIVE, AL10.AL_TRUE);
AL10.alSourcei(rVal.sourceId, AL10.AL_SOURCE_RELATIVE, AL10.AL_TRUE);
}
return rVal;
}
/**

View File

@ -23,7 +23,7 @@ public class AudioUtils {
AudioSource rVal = null;
AudioBuffer buffer = Globals.assetManager.fetchAudio(audioFile);
if(buffer != null && Globals.audioEngine.initialized()){
rVal = new AudioSource(loops,false);
rVal = AudioSource.create(loops,true);
rVal.setBuffer(buffer.getBufferId());
rVal.setGain(Globals.audioEngine.getGain());
rVal.setPosition(position);
@ -55,7 +55,7 @@ public class AudioUtils {
AudioSource rVal = null;
AudioBuffer buffer = Globals.assetManager.fetchAudio(FileUtils.sanitizeFilePath(audioFile));
if(buffer != null && Globals.audioEngine.initialized()){
rVal = new AudioSource(loops,false);
rVal = AudioSource.create(loops,false);
rVal.setBuffer(buffer.getBufferId());
rVal.setGain(Globals.audioEngine.getGain());
rVal.play();

View File

@ -0,0 +1,195 @@
package electrosphere.audio.movement;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.joml.Vector3d;
import electrosphere.audio.VirtualAudioSourceManager.VirtualAudioSourceType;
import electrosphere.engine.Globals;
import electrosphere.game.data.audio.SurfaceAudioCollection;
import electrosphere.game.data.audio.SurfaceAudioType;
import electrosphere.logger.LoggerInterface;
/**
 * Service that provides utilities and management for playing audio under different movement conditions
 */
public class MovementAudioService {

    /**
     * Different types of interactions with the surface
     */
    public static enum InteractionType {
        /**
         * A bare footstep
         */
        STEP_BARE_REG,
        /**
         * A bare, heavy footstep
         */
        STEP_BARE_HEAVY,
        /**
         * A shoed footstep
         */
        STEP_SHOE_REG,
        /**
         * A shoed, heavy footstep
         */
        STEP_SHOE_HEAVY,
        /**
         * Jumping from the surface
         */
        JUMP,
        /**
         * Landing on the surface
         */
        LAND,
    }

    //The default surface audio data (fallback for voxel types without an explicit mapping)
    SurfaceAudioType defaultSurfaceAudio;

    //Maps voxel types to the corresponding surface audio
    Map<Integer,SurfaceAudioType> surfaceAudioMap = new HashMap<Integer,SurfaceAudioType>();

    //The voxel types to ignore (no audio is played for these)
    List<Integer> ignoredVoxelTypes;

    /**
     * The random used to pick an audio file
     */
    Random random = new Random();

    /**
     * Initializes the movement audio service from the current game config.
     * Builds the voxel-type to surface-audio mapping and queues every
     * referenced audio file for loading.
     */
    public void init(){
        SurfaceAudioCollection surfaceAudioCollection = Globals.gameConfigCurrent.getSurfaceAudioCollection();
        this.ignoredVoxelTypes = surfaceAudioCollection.getIgnoredVoxelTypes();
        this.defaultSurfaceAudio = surfaceAudioCollection.getDefaultSurfaceAudio();
        for(SurfaceAudioType audioType : surfaceAudioCollection.getSurfaceAudio()){
            for(int voxelType : audioType.getVoxelTypeIds()){
                //flag duplicate definitions, but keep the last one registered
                if(surfaceAudioMap.containsKey(voxelType)){
                    LoggerInterface.loggerAudio.ERROR(new IllegalStateException("Duplicate voxel definitions in the surface audio definitions! " + voxelType));
                }
                surfaceAudioMap.put(voxelType,audioType);
            }
        }
        //queue all audio to be loaded
        this.loadSurfaceAudio(this.defaultSurfaceAudio);
        for(SurfaceAudioType audioType : surfaceAudioCollection.getSurfaceAudio()){
            this.loadSurfaceAudio(audioType);
        }
    }

    /**
     * Gets the audio path to play
     * @param voxelType The type of voxel
     * @param type The interaction type
     * @return The path to the audio file to play, or null if the voxel type is ignored or no audio is defined for the interaction
     */
    public String getAudioPath(int voxelType, InteractionType type){
        //Check if ignored
        if(this.ignoredVoxelTypes != null && this.ignoredVoxelTypes.contains(voxelType)){
            return null;
        }
        //gets the surface audio definition, falling back to the default for undefined surfaces
        SurfaceAudioType surfaceAudio = this.defaultSurfaceAudio;
        if(surfaceAudioMap.containsKey(voxelType)){
            surfaceAudio = surfaceAudioMap.get(voxelType);
        } else {
            LoggerInterface.loggerAudio.WARNING("Surface undefined in the surface audio collection! " + voxelType);
        }
        //gets the list to pull from
        List<String> availableFiles;
        switch(type){
            case STEP_BARE_REG: {
                availableFiles = surfaceAudio.getFootstepRegularBareAudioPaths();
            } break;
            case STEP_BARE_HEAVY: {
                availableFiles = surfaceAudio.getFootstepHeavyBareAudioPaths();
            } break;
            case STEP_SHOE_REG: {
                availableFiles = surfaceAudio.getFootstepRegularShoeAudioPaths();
            } break;
            case STEP_SHOE_HEAVY: {
                availableFiles = surfaceAudio.getFootstepHeavyShoeAudioPaths();
            } break;
            case JUMP: {
                availableFiles = surfaceAudio.getJumpAudioPaths();
            } break;
            case LAND: {
                availableFiles = surfaceAudio.getLandAudioPaths();
            } break;
            default: {
                availableFiles = surfaceAudio.getFootstepRegularBareAudioPaths();
            } break;
        }
        //guard against missing or empty definitions — Random.nextInt(0) would throw IllegalArgumentException
        if(availableFiles == null || availableFiles.isEmpty()){
            return null;
        }
        //randomly select one of the available files
        int roll = random.nextInt(availableFiles.size());
        return availableFiles.get(roll);
    }

    /**
     * Plays an interaction (non-positional)
     * @param voxelType The voxel type
     * @param type The interaction type
     */
    public void playAudio(int voxelType, InteractionType type){
        String audioPath = this.getAudioPath(voxelType, type);
        if(audioPath != null){
            Globals.virtualAudioSourceManager.createVirtualAudioSource(audioPath, VirtualAudioSourceType.CREATURE, false);
        }
    }

    /**
     * Plays an interaction at a given position
     * @param voxelType The voxel type
     * @param type The interaction type
     * @param position The position of the audio
     */
    public void playAudioPositional(int voxelType, InteractionType type, Vector3d position){
        String audioPath = this.getAudioPath(voxelType, type);
        if(audioPath != null){
            Globals.virtualAudioSourceManager.createVirtualAudioSource(audioPath, VirtualAudioSourceType.CREATURE, false, position);
        }
    }

    /**
     * Loads a given surface audio definition into memory
     * @param surfaceAudioType The surface audio definition
     */
    private void loadSurfaceAudio(SurfaceAudioType surfaceAudioType){
        this.enqueueAudioPaths(surfaceAudioType.getFootstepRegularBareAudioPaths());
        this.enqueueAudioPaths(surfaceAudioType.getFootstepHeavyBareAudioPaths());
        this.enqueueAudioPaths(surfaceAudioType.getFootstepRegularShoeAudioPaths());
        this.enqueueAudioPaths(surfaceAudioType.getFootstepHeavyShoeAudioPaths());
        this.enqueueAudioPaths(surfaceAudioType.getJumpAudioPaths());
        this.enqueueAudioPaths(surfaceAudioType.getLandAudioPaths());
    }

    /**
     * Queues a list of audio paths for asset loading
     * @param audioPaths The audio paths (may be null, in which case nothing is queued)
     */
    private void enqueueAudioPaths(List<String> audioPaths){
        if(audioPaths == null){
            return;
        }
        for(String audioPath : audioPaths){
            Globals.assetManager.addAudioPathToQueue(audioPath);
        }
    }

}

View File

@ -93,7 +93,12 @@ public class ClientWorldData {
return convertChunkToRealSpace(world);
}
public Vector3i convertRealToChunkSpace(Vector3d position){
/**
* Converts a real space position to its world space equivalent
* @param position The real space position
* @return The world space position (ie the chunk containing the real space position)
*/
public Vector3i convertRealToWorldSpace(Vector3d position){
return new Vector3i(
convertRealToChunkSpace(position.x),
convertRealToChunkSpace(position.y),

View File

@ -602,7 +602,7 @@ public class DrawCellManager {
//pre-existing values
Vector3d playerPosition = EntityUtils.getPosition(Globals.playerEntity);
Vector3i playerChunkPosition = Globals.clientWorldData.convertRealToChunkSpace(playerPosition);
Vector3i playerChunkPosition = Globals.clientWorldData.convertRealToWorldSpace(playerPosition);
//variables used while iterating across chunk positions
double currentRadius = 0; //the current radius is in units of chunks, even though it's a double (it's discrete, not real)

View File

@ -0,0 +1,38 @@
package electrosphere.client.terrain.sampling;
import org.joml.Vector3d;
import org.joml.Vector3i;
import electrosphere.client.terrain.cache.ChunkData;
import electrosphere.engine.Globals;
import electrosphere.entity.Entity;
import electrosphere.entity.EntityUtils;
/**
 * Samples voxels
 */
public class ClientVoxelSampler {

    /**
     * Gets the voxel type beneath an entity
     * @param entity The entity
     * @return The voxel type
     */
    public static int getVoxelTypeBeneathEntity(Entity entity){
        //samples at the entity's own position — assumes the entity origin is at
        //foot/ground level so this reads the surface voxel; TODO confirm
        return ClientVoxelSampler.getVoxelType(EntityUtils.getPosition(entity));
    }

    /**
     * Gets the voxel type at a given real-space position
     * @param realPos The real-space position
     * @return The voxel type id
     */
    public static int getVoxelType(Vector3d realPos){
        //world-space (chunk) coordinate containing the position
        Vector3i chunkSpacePos = Globals.clientWorldData.convertRealToWorldSpace(realPos);
        //voxel coordinate within that chunk
        Vector3i voxelSpacePos = Globals.clientWorldData.convertRealToVoxelSpace(realPos);
        //NOTE(review): if the chunk at this point is not loaded, chunkData may be
        //null and the call below would NPE — verify callers only sample loaded chunks
        ChunkData chunkData = Globals.clientTerrainManager.getChunkDataAtWorldPoint(chunkSpacePos);
        int voxelId = chunkData.getType(voxelSpacePos);
        return voxelId;
    }

}

View File

@ -9,6 +9,7 @@ import org.joml.Vector3f;
import electrosphere.audio.AudioEngine;
import electrosphere.audio.VirtualAudioSourceManager;
import electrosphere.audio.movement.MovementAudioService;
import electrosphere.auth.AuthenticationManager;
import electrosphere.client.fluid.cells.FluidCellManager;
import electrosphere.client.fluid.manager.ClientFluidManager;
@ -104,6 +105,7 @@ public class Globals {
//
public static AudioEngine audioEngine;
public static VirtualAudioSourceManager virtualAudioSourceManager;
public static MovementAudioService movementAudioService = new MovementAudioService();
//
@ -478,6 +480,7 @@ public class Globals {
Globals.assetManager.addAudioPathToQueue(path);
}
Globals.assetManager.loadAssetsInQueue();
Globals.movementAudioService.init();
}
/**

View File

@ -20,6 +20,8 @@ import electrosphere.server.poseactor.PoseModel;
import electrosphere.util.FileUtils;
import java.io.File;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@ -363,6 +365,14 @@ public class AssetManager {
return rVal;
}
/**
* Gets all audio loaded into the engine
* @return The collection of all audio buffers
*/
public Collection<AudioBuffer> getAllAudio(){
return Collections.unmodifiableCollection(this.audioLoadedIntoMemory.values());
}

View File

@ -1,6 +1,7 @@
package electrosphere.entity.state.movement;
import electrosphere.audio.VirtualAudioSourceManager.VirtualAudioSourceType;
import electrosphere.audio.movement.MovementAudioService.InteractionType;
import electrosphere.client.terrain.sampling.ClientVoxelSampler;
import electrosphere.engine.Globals;
import electrosphere.entity.Entity;
import electrosphere.entity.EntityDataStrings;
@ -11,7 +12,6 @@ import electrosphere.entity.btree.StateTransitionUtil.StateTransitionUtilItem;
import electrosphere.entity.state.AnimationPriorities;
import electrosphere.entity.state.client.firstPerson.FirstPersonTree;
import electrosphere.entity.state.movement.jump.ClientJumpTree;
import electrosphere.game.data.common.TreeDataAudio;
import electrosphere.game.data.creature.type.movement.FallMovementSystem;
import electrosphere.renderer.actor.Actor;
@ -101,19 +101,13 @@ public class FallTree implements BehaviorTree {
}
FirstPersonTree.conditionallyPlayAnimation(parent, fallMovementSystem.getLandState().getAnimation());
}
//play animation, audio, etc, for state
TreeDataAudio audioData = fallMovementSystem.getLandState().getAudioData();
if(parent == Globals.playerEntity && !Globals.controlHandler.cameraIsThirdPerson()){
//first person
Globals.movementAudioService.playAudio(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.LAND);
//play first person audio
if(Globals.audioEngine.initialized() && audioData != null && audioData.getAudioPath() != null){
Globals.virtualAudioSourceManager.createVirtualAudioSource(audioData.getAudioPath(), VirtualAudioSourceType.CREATURE, false);
}
} else {
//play third person audio
if(Globals.audioEngine.initialized() && audioData != null && audioData.getAudioPath() != null){
Globals.virtualAudioSourceManager.createVirtualAudioSource(audioData.getAudioPath(), VirtualAudioSourceType.CREATURE, false, EntityUtils.getPosition(parent));
}
Globals.movementAudioService.playAudioPositional(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.LAND, EntityUtils.getPosition(parent));
}
}
}

View File

@ -4,12 +4,15 @@ package electrosphere.entity.state.movement.groundmove;
import electrosphere.net.synchronization.BehaviorTreeIdEnums;
import electrosphere.entity.state.gravity.GravityUtils;
import electrosphere.audio.movement.MovementAudioService.InteractionType;
import electrosphere.client.terrain.sampling.ClientVoxelSampler;
import electrosphere.collision.PhysicsEntityUtils;
import electrosphere.collision.collidable.Collidable;
import electrosphere.engine.Globals;
import electrosphere.entity.types.collision.CollisionObjUtils;
import electrosphere.entity.types.creature.CreatureUtils;
import electrosphere.game.data.creature.type.movement.GroundMovementSystem;
import electrosphere.logger.LoggerInterface;
import electrosphere.entity.Entity;
import electrosphere.entity.EntityDataStrings;
import electrosphere.entity.EntityUtils;
@ -105,6 +108,10 @@ public class ClientGroundMovementTree implements BehaviorTree {
//the vector controling the direction the entity will move in
Vector3d movementVector = new Vector3d(1,0,0);
//Tracks whether footstep audio has been played or not
boolean playedFootstepFirst = false;
boolean playedFootstepSecond = false;
/**
* Constructor
* @param e The parent entity
@ -288,18 +295,20 @@ public class ClientGroundMovementTree implements BehaviorTree {
//state machine
switch(state){
case STARTUP: {
String animationToPlay = determineCorrectAnimation();
if(entityActor != null){
String animationToPlay = determineCorrectAnimation();
if(
!entityActor.isPlayingAnimation() || !entityActor.isPlayingAnimation(animationToPlay) &&
(jumpTree == null || !jumpTree.isJumping()) &&
(fallTree == null || !fallTree.isFalling())
){
if(!entityActor.isPlayingAnimation(animationToPlay)){
entityActor.playAnimation(animationToPlay,AnimationPriorities.getValue(AnimationPriorities.CORE_MOVEMENT));
entityActor.incrementAnimationTime(0.0001);
//reset footstep tracking
this.playedFootstepFirst = false;
this.playedFootstepSecond = false;
}
FirstPersonTree.conditionallyPlayAnimation(parent, groundMovementData.getAnimationStartup().getNameFirstPerson(), AnimationPriorities.getValue(AnimationPriorities.CORE_MOVEMENT));
}
//conditionally play footstep audio
this.playFootstepAudio(0,entityActor.getAnimationTime(animationToPlay),position);
//run startup code
velocity = velocity + acceleration * (float)Globals.timekeeper.getSimFrameTime();
//check if can transition state
@ -320,20 +329,21 @@ public class ClientGroundMovementTree implements BehaviorTree {
GravityUtils.clientAttemptActivateGravity(parent);
} break;
case MOVE: {
//check if can restart animation
//if yes, restart animation
String animationToPlay = determineCorrectAnimation();
if(entityActor != null){
String animationToPlay = determineCorrectAnimation();
if(
!entityActor.isPlayingAnimation() || !entityActor.isPlayingAnimation(animationToPlay) &&
(jumpTree == null || !jumpTree.isJumping()) &&
(fallTree == null || !fallTree.isFalling())
){
if(!entityActor.isPlayingAnimation(animationToPlay)){
entityActor.playAnimation(animationToPlay,AnimationPriorities.getValue(AnimationPriorities.CORE_MOVEMENT));
entityActor.incrementAnimationTime(0.0001);
//reset footstep tracking
this.playedFootstepFirst = false;
this.playedFootstepSecond = false;
}
FirstPersonTree.conditionallyPlayAnimation(parent, groundMovementData.getAnimationLoop().getNameFirstPerson(), AnimationPriorities.getValue(AnimationPriorities.CORE_MOVEMENT));
}
//conditionally play footstep audio
this.playFootstepAudio(0,entityActor.getAnimationTime(animationToPlay),position);
if(velocity != maxNaturalVelocity){
velocity = maxNaturalVelocity;
CreatureUtils.setVelocity(parent, velocity);
@ -350,18 +360,20 @@ public class ClientGroundMovementTree implements BehaviorTree {
} break;
case SLOWDOWN: {
//run slowdown code
String animationToPlay = determineCorrectAnimation();
if(entityActor != null){
String animationToPlay = determineCorrectAnimation();
if(
!entityActor.isPlayingAnimation() || !entityActor.isPlayingAnimation(animationToPlay) &&
(jumpTree == null || !jumpTree.isJumping()) &&
(fallTree == null || !fallTree.isFalling())
){
if(!entityActor.isPlayingAnimation(animationToPlay)){
entityActor.playAnimation(animationToPlay,AnimationPriorities.getValue(AnimationPriorities.CORE_MOVEMENT));
entityActor.incrementAnimationTime(0.0001);
//reset footstep tracking
this.playedFootstepFirst = false;
this.playedFootstepSecond = false;
}
FirstPersonTree.conditionallyPlayAnimation(parent, groundMovementData.getAnimationWindDown().getNameFirstPerson(), AnimationPriorities.getValue(AnimationPriorities.CORE_MOVEMENT));
}
//conditionally play footstep audio
this.playFootstepAudio(0,entityActor.getAnimationTime(animationToPlay),position);
//velocity stuff
velocity = velocity - acceleration * (float)Globals.timekeeper.getSimFrameTime();
//check if can transition state
@ -369,7 +381,7 @@ public class ClientGroundMovementTree implements BehaviorTree {
velocity = 0;
state = MovementTreeState.IDLE;
if(entityActor != null){
String animationToPlay = determineCorrectAnimation();
animationToPlay = determineCorrectAnimation();
if(entityActor.isPlayingAnimation() && entityActor.isPlayingAnimation(animationToPlay)){
entityActor.stopAnimation(animationToPlay);
}
@ -443,6 +455,36 @@ public class ClientGroundMovementTree implements BehaviorTree {
this.fallTree = fallTree;
}
/**
* Conditionally plays audio for footsteps
* @param voxelType The voxel type
* @param animationTime The time of the current animation
* @param position The position of the parent entity
*/
private void playFootstepAudio(int voxelType, double animationTime, Vector3d position){
Float firstOffset = this.groundMovementData.getFootstepFirstAudioOffset();
Float secondOffset = this.groundMovementData.getFootstepSecondAudioOffset();
if(!this.playedFootstepFirst && firstOffset != null && animationTime > firstOffset){
this.playedFootstepFirst = true;
if(parent == Globals.playerEntity && !Globals.controlHandler.cameraIsThirdPerson()){
Globals.movementAudioService.playAudio(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.STEP_BARE_REG);
} else {
Globals.movementAudioService.playAudioPositional(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.STEP_BARE_REG, position);
}
}
if(!this.playedFootstepSecond && secondOffset != null && animationTime > secondOffset){
this.playedFootstepSecond = true;
if(parent == Globals.playerEntity && !Globals.controlHandler.cameraIsThirdPerson()){
Globals.movementAudioService.playAudio(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.STEP_BARE_REG);
} else {
Globals.movementAudioService.playAudioPositional(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.STEP_BARE_REG, position);
}
}
if(firstOffset == null || secondOffset == null){
LoggerInterface.loggerAudio.WARNING("Footstep offset undefined for creature! " + CreatureUtils.getType(parent));
}
}
/**
* <p> Automatically generated </p>
* <p>

View File

@ -3,6 +3,7 @@ package electrosphere.game.data;
import java.util.LinkedList;
import java.util.List;
import electrosphere.game.data.audio.SurfaceAudioCollection;
import electrosphere.game.data.creature.type.CreatureData;
import electrosphere.game.data.creature.type.CreatureTypeLoader;
import electrosphere.game.data.creature.type.attack.AttackMoveResolver;
@ -40,6 +41,11 @@ public class Config {
//the hints that are defined
HintDefinition hintData;
/**
* The surface audio definitions
*/
SurfaceAudioCollection surfaceAudioCollection;
/**
* Loads the default data
* @return The config
@ -56,6 +62,7 @@ public class Config {
config.voxelData = FileUtils.loadObjectFromAssetPath("Data/voxelTypes.json", VoxelData.class);
config.projectileTypeHolder = FileUtils.loadObjectFromAssetPath("Data/projectile.json", ProjectileTypeHolder.class);
config.hintData = FileUtils.loadObjectFromAssetPath("Data/tutorial/hints.json", HintDefinition.class);
config.surfaceAudioCollection = FileUtils.loadObjectFromAssetPath("Data/audio/surface.json", SurfaceAudioCollection.class);
config.projectileTypeHolder.init();
//validate
@ -226,4 +233,12 @@ public class Config {
return hintData;
}
/**
* Gets the surface audio collection
* @return The surface audio collection
*/
public SurfaceAudioCollection getSurfaceAudioCollection(){
return this.surfaceAudioCollection;
}
}

View File

@ -0,0 +1,49 @@
package electrosphere.game.data.audio;
import java.util.List;
/**
 * Audio for different types of interactions with the ground
 */
public class SurfaceAudioCollection {

    /**
     * Fallback audio used for any surface without an explicit definition
     */
    SurfaceAudioType defaultSurfaceAudio;

    /**
     * Per-surface audio definitions
     */
    List<SurfaceAudioType> surfaceAudio;

    /**
     * Voxel types for which no audio should be played
     */
    List<Integer> ignoredVoxelTypes;

    /**
     * Gets the fallback audio definition applied to undefined surfaces
     * @return The default surface audio
     */
    public SurfaceAudioType getDefaultSurfaceAudio(){
        return defaultSurfaceAudio;
    }

    /**
     * Gets the per-surface audio definitions
     * @return The list of surface audio data
     */
    public List<SurfaceAudioType> getSurfaceAudio(){
        return surfaceAudio;
    }

    /**
     * Gets the voxel types for which audio playback is suppressed
     * @return The list of voxel type ids
     */
    public List<Integer> getIgnoredVoxelTypes(){
        return ignoredVoxelTypes;
    }

}

View File

@ -0,0 +1,101 @@
package electrosphere.game.data.audio;
import java.util.List;
/**
 * Audio data for a specific surface type
 */
public class SurfaceAudioType {

    /**
     * The voxel types that this audio applies to
     */
    List<Integer> voxelTypeIds;

    /**
     * The audio paths for sound effects to play for regular, bare footsteps on these voxel types
     */
    List<String> footstepRegularBareAudioPaths;

    /**
     * The audio paths for sound effects to play for heavy, bare footsteps on these voxel types
     */
    List<String> footstepHeavyBareAudioPaths;

    /**
     * The audio paths for sound effects to play for regular, shoed footsteps on these voxel types
     */
    List<String> footstepRegularShoeAudioPaths;

    /**
     * The audio paths for sound effects to play for heavy, shoed footsteps on these voxel types
     */
    List<String> footstepHeavyShoeAudioPaths;

    /**
     * The audio paths for sound effects to play for jumps beginning on these voxel types
     */
    List<String> jumpAudioPaths;

    /**
     * The audio paths for sound effects to play for landing animations on these voxel types
     */
    List<String> landAudioPaths;

    /**
     * Gets the voxels ids that use this audio
     * @return The list of voxel ids
     */
    public List<Integer> getVoxelTypeIds(){
        return voxelTypeIds;
    }

    /**
     * Gets the audio to play for regular, bare footsteps on these surfaces
     * @return The audio file paths
     */
    public List<String> getFootstepRegularBareAudioPaths(){
        return footstepRegularBareAudioPaths;
    }

    /**
     * Gets the audio to play for heavy, bare footsteps on these surfaces
     * @return The audio file paths
     */
    public List<String> getFootstepHeavyBareAudioPaths(){
        return footstepHeavyBareAudioPaths;
    }

    /**
     * Gets the audio to play for regular, shoed footsteps on these surfaces
     * @return The audio file paths
     */
    public List<String> getFootstepRegularShoeAudioPaths(){
        return footstepRegularShoeAudioPaths;
    }

    /**
     * Gets the audio to play for heavy, shoed footsteps on these surfaces
     * @return The audio file paths
     */
    public List<String> getFootstepHeavyShoeAudioPaths(){
        return footstepHeavyShoeAudioPaths;
    }

    /**
     * Gets the audio to play for jumping from these surfaces
     * @return The audio file paths
     */
    public List<String> getJumpAudioPaths(){
        return jumpAudioPaths;
    }

    /**
     * Gets the audio to play for landing on these surfaces
     * @return The audio file paths
     */
    public List<String> getLandAudioPaths(){
        return landAudioPaths;
    }

}

View File

@ -36,6 +36,16 @@ public class GroundMovementSystem implements MovementSystem {
//sprint data
SprintSystem sprintSystem;
/**
* The offset into the main animation to play the first footstep sound
*/
Float footstepFirstAudioOffset;
/**
* The offset into the main animation to play the second footstep sound
*/
Float footstepSecondAudioOffset;
/**
* Gets the acceleration factor for this creature
@ -101,6 +111,22 @@ public class GroundMovementSystem implements MovementSystem {
return this.backpedalMultiplier;
}
/**
* Gets the offset into the main animation to play the audio for the first footstep
* @return The offset into the audio
*/
public Float getFootstepFirstAudioOffset(){
return footstepFirstAudioOffset;
}
/**
* Gets the offset into the main animation to play the audio for the second footstep
* @return The offset into the audio
*/
public Float getFootstepSecondAudioOffset(){
return footstepSecondAudioOffset;
}
@Override
public String getType() {
return type;

View File

@ -0,0 +1,76 @@
package electrosphere.menu.debug;
import electrosphere.audio.AudioBuffer;
import electrosphere.audio.VirtualAudioSource;
import electrosphere.engine.Globals;
import electrosphere.renderer.ui.imgui.ImGuiWindow;
import electrosphere.renderer.ui.imgui.ImGuiWindow.ImGuiWindowCallback;
import imgui.ImGui;
import imgui.type.ImString;
/**
 * Audio debug menus
 */
public class ImGuiAudio {

    /**
     * The audio debug menu window
     */
    public static ImGuiWindow audioDebugMenu;

    /**
     * Filter text used to narrow the list of loaded audio buffers by substring match
     */
    private static ImString bufferListFilter = new ImString();

    /**
     * Creates the audio debug menu and registers it with the im gui render pipeline.
     * The window starts closed; other debug menus open it on demand.
     */
    public static void createAudioDebugMenu(){
        audioDebugMenu = new ImGuiWindow("Audio");
        audioDebugMenu.setCallback(new ImGuiWindowCallback() {
            @Override
            public void exec() {
                //audio engine details
                ImGui.text("Audio Engine Details");
                ImGui.text("Virtual Audio Source Manager Details");
                ImGui.text("Total number active virtual sources: " + Globals.virtualAudioSourceManager.getSourceQueue().size());
                if(ImGui.collapsingHeader("Engine Configuration")){
                    ImGui.text("Current audio device: " + Globals.audioEngine.getDevice());
                    ImGui.text("Default audio device: " + Globals.audioEngine.getDefaultDevice());
                    ImGui.text("Has HRTF: " + Globals.audioEngine.getHRTFStatus());
                    ImGui.text("Listener location: " + Globals.audioEngine.getListener().getPosition());
                    ImGui.text("Listener eye vector: " + Globals.audioEngine.getListener().getEyeVector());
                    ImGui.text("Listener up vector: " + Globals.audioEngine.getListener().getUpVector());
                }
                //only active (mapped) virtual sources
                if(ImGui.collapsingHeader("Mapped Virtual Sources")){
                    for(VirtualAudioSource source : Globals.virtualAudioSourceManager.getMappedSources()){
                        ImGui.text("Source " + source.getPriority());
                        ImGui.text(" - Position " + source.getPosition());
                        ImGui.text(" - Gain " + source.getGain());
                        ImGui.text(" - Type " + source.getType());
                        ImGui.text(" - Total time played " + source.getTotalTimePlayed());
                        //label typo fixed ("Lenth" -> "Length")
                        ImGui.text(" - Buffer Length " + source.getBufferLength());
                    }
                }
                //all virtual sources, mapped or not
                if(ImGui.collapsingHeader("All Virtual Sources")){
                    for(VirtualAudioSource source : Globals.virtualAudioSourceManager.getSourceQueue()){
                        ImGui.text("Position " + source.getPosition());
                    }
                }
                //all buffers loaded into memory and stats about them
                if(ImGui.collapsingHeader("All Loaded Audio Buffers")){
                    //the boolean return (text changed this frame) is intentionally unused
                    ImGui.inputText("Filter", bufferListFilter);
                    for(AudioBuffer buffer : Globals.assetManager.getAllAudio()){
                        if(buffer.getFilePath().contains(bufferListFilter.get()) && ImGui.collapsingHeader(buffer.getFilePath())){
                            ImGui.text("Length: " + buffer.getLength());
                        }
                    }
                }
            }
        });
        audioDebugMenu.setOpen(false);
        Globals.renderingEngine.getImGuiPipeline().addImGuiWindow(audioDebugMenu);
    }
}

View File

@ -5,7 +5,6 @@ import java.util.Map;
import org.ode4j.ode.DBody;
import electrosphere.audio.VirtualAudioSource;
import electrosphere.collision.PhysicsEntityUtils;
import electrosphere.controls.ControlHandler.ControlsState;
import electrosphere.engine.Globals;
@ -37,11 +36,6 @@ public class ImGuiWindowMacros {
private static ImGuiLinePlot globalFrametimePlot;
private static Map<String,ImGuiLinePlotDataset> globalFrametimeDatasets;
//audio debug menu
private static ImGuiWindow audioDebugMenu;
private static boolean showAllVirtualAudioChildren = false;
private static boolean showMappedVirtualAudioChildren = true;
//player entity details
private static ImGuiWindow playerEntityWindow;
@ -54,13 +48,13 @@ public class ImGuiWindowMacros {
public static void initImGuiWindows(){
createMainDebugMenu();
createFramerateGraph();
createAudioDebugMenu();
createPlayerEntityDebugWindow();
createFluidDebugWindow();
ImGuiEntityMacros.createClientEntityWindows();
ImGuiUIFramework.createUIFrameworkWindows();
ImGuiControls.createControlsWindows();
ImGuiAI.createAIDebugWindow();
ImGuiAudio.createAudioDebugMenu();
}
/**
@ -114,65 +108,6 @@ public class ImGuiWindowMacros {
}
}
/**
* Create audio debug menu
*/
private static void createAudioDebugMenu(){
audioDebugMenu = new ImGuiWindow("Audio");
audioDebugMenu.setCallback(new ImGuiWindowCallback() {
@Override
public void exec() {
//audio engine details
ImGui.text("Audio Engine Details");
ImGui.text("Current audio device: " + Globals.audioEngine.getDevice());
ImGui.text("Default audio device: " + Globals.audioEngine.getDefaultDevice());
ImGui.text("Has HRTF: " + Globals.audioEngine.getHRTFStatus());
ImGui.text("Listener location: " + Globals.audioEngine.getListener().getPosition());
ImGui.text("Listener eye vector: " + Globals.audioEngine.getListener().getEyeVector());
ImGui.text("Listener up vector: " + Globals.audioEngine.getListener().getUpVector());
ImGui.text("Virtual Audio Source Manager Details");
ImGui.text("Total number active virtual sources: " + Globals.virtualAudioSourceManager.getSourceQueue().size());
//only active children
if(showMappedVirtualAudioChildren){
ImGui.beginChild("mapped virtual sources");
for(VirtualAudioSource source : Globals.virtualAudioSourceManager.getMappedSources()){
ImGui.text("Source " + source.getPriority());
ImGui.text(" - Position " + source.getPosition());
ImGui.text(" - Gain " + source.getGain());
ImGui.text(" - Type " + source.getType());
ImGui.text(" - Total time played " + source.getTotalTimePlayed());
ImGui.text(" - Buffer Lenth " + source.getBufferLength());
}
ImGui.endChild();
if(ImGui.button("Hide Mapped Virtual Children")){
showMappedVirtualAudioChildren = false;
}
} else {
if(ImGui.button("Show Mapped Virtual Children")){
showMappedVirtualAudioChildren = true;
}
}
//all virtual children
if(showAllVirtualAudioChildren){
ImGui.beginChild("all virtual sources");
for(VirtualAudioSource source : Globals.virtualAudioSourceManager.getSourceQueue()){
ImGui.text("Position " + source.getPosition());
}
ImGui.endChild();
if(ImGui.button("Hide All Virtual Children")){
showAllVirtualAudioChildren = false;
}
} else {
if(ImGui.button("Show All Virtual Children")){
showAllVirtualAudioChildren = true;
}
}
}
});
audioDebugMenu.setOpen(false);
Globals.renderingEngine.getImGuiPipeline().addImGuiWindow(audioDebugMenu);
}
/**
* Create player entity debug menu
@ -266,7 +201,7 @@ public class ImGuiWindowMacros {
}
//show audio debug
if(ImGui.button("Show Audio Debug Menu")){
audioDebugMenu.setOpen(true);
ImGuiAudio.audioDebugMenu.setOpen(true);
}
//show audio debug
if(ImGui.button("Show Player Entity Debug Menu")){

View File

@ -39,6 +39,11 @@ import org.joml.Vector4d;
*/
public class Actor {
/**
* Returned when the current time is requested of an animation that the actor is not playing
*/
public static final int INVALID_ANIMATION = -1;
//the model path of the model backing the actor
String modelPath;
@ -91,13 +96,17 @@ public class Actor {
}
}
public double getAnimationTime(String animationName){
for(ActorAnimationMask mask : animationQueue){
if(mask.getAnimationName().contains(animationName)){
return mask.getTime();
}
/**
 * Looks up how far this actor has progressed through the named animation
 * @param animation The animation's name
 * @return The time into the animation, -1 if the animation is not being played
 */
public double getAnimationTime(String animation){
    ActorAnimationMask activeMask = this.getAnimationMask(animation);
    return activeMask == null ? INVALID_ANIMATION : activeMask.time;
}
/**
@ -110,7 +119,7 @@ public class Actor {
return false;
}
for(ActorAnimationMask mask : animationQueue){
if(mask.getAnimationName().contains(animationName)){
if(mask.getAnimationName().equals(animationName)){
return true;
}
}
@ -159,9 +168,6 @@ public class Actor {
* @param priority The priority of the animation
*/
public void playAnimation(String animationName, int priority){
// animationTime = 0;
// playingAnimation = true;
// animation = animationName;
Model model = Globals.assetManager.fetchModel(modelPath);
if(model != null && model.getAnimation(animationName) != null){
double length = model.getAnimation(animationName).duration;
@ -366,6 +372,22 @@ public class Actor {
this.animationScalar = animationScalar;
}
/**
 * Finds the mask under which a given animation is currently playing on this actor
 * @param animationName The animation's name
 * @return The animation mask if the actor is playing the animation, null otherwise
 */
public ActorAnimationMask getAnimationMask(String animationName){
    for(ActorAnimationMask candidate : this.getAnimationQueue()){
        if(candidate.animationName.equals(animationName)){
            return candidate;
        }
        //a case-insensitive near-miss usually indicates a data typo, so surface it
        if(candidate.animationName.equalsIgnoreCase(animationName)){
            LoggerInterface.loggerEngine.WARNING("Animation mask failed to find, but there is an animation with a very similar name! " + animationName + " vs " + candidate.animationName);
        }
    }
    return null;
}
/**
* Applies spatial data on the actor, this includes the model matrix as well as the real world position
* @param modelMatrix The model matrix