More ergonomic spatial audio (#9800)

# Objective

Spatial audio was heroically thrown together at the last minute for Bevy
0.10, but right now it's a bit of a pain to use -- users need to
manually update audio sinks with the positions of the listener and emitter.

Hopefully the migration guide entry speaks for itself.

## Solution

Add a new `SpatialListener` component and automatically update sinks
with the positions of the listener and emitter.

## Changelog

`SpatialAudioSink`s are now automatically updated with the positions of
emitters and listeners.

## Migration Guide

Spatial audio now automatically uses the transform of the entity with an
`AudioBundle` and the transform of the entity with a `SpatialListener`
component.

If you were manually scaling emitter/listener positions, you can use the
`spatial_scale` field of `AudioPlugin` instead.
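
For instance, the plugin can be configured like this; a minimal sketch based on the 2D example in this PR (the `1.0 / 100.0` factor is just that example's value):

```rust
use bevy::{
    audio::{AudioPlugin, SpatialScale},
    prelude::*,
};

fn main() {
    App::new()
        // Emitter and listener positions are multiplied by this scale before
        // being handed to the audio backend, so 100 pixels map to 1 audio unit.
        .add_plugins(DefaultPlugins.set(AudioPlugin {
            spatial_scale: SpatialScale::new_2d(1.0 / 100.0),
            ..default()
        }))
        .run();
}
```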

```rust
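// Note: `listener_position`, `emitter_position`, and `gap` are placeholder
// values assumed to be defined elsewhere in both the old and new snippets.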

// Old

commands.spawn(
    SpatialAudioBundle {
        source: asset_server.load("sounds/Windless Slopes.ogg"),
        settings: PlaybackSettings::LOOP,
        spatial: SpatialSettings::new(listener_position, gap, emitter_position),
    },
);

fn update(
    emitter_query: Query<(&Transform, &SpatialAudioSink)>,
    listener_query: Query<&Transform, With<Listener>>,
) {
    let listener = listener_query.single();

    for (transform, sink) in &emitter_query {
        sink.set_emitter_position(transform.translation);
        sink.set_listener_position(*listener, gap);
    }
}

// New

commands.spawn((
    SpatialBundle::from_transform(Transform::from_translation(emitter_position)),
    AudioBundle {
        source: asset_server.load("sounds/Windless Slopes.ogg"),
        settings: PlaybackSettings::LOOP.with_spatial(true),
    },
));

commands.spawn((
    SpatialBundle::from_transform(Transform::from_translation(listener_position)),
    SpatialListener::new(gap),
));
```

## Discussion

I removed `SpatialAudioBundle` because the `SpatialSettings` component
was made mostly redundant, and without that it was identical to
`AudioBundle`.

`SpatialListener` is a bare component and not a bundle, which may feel like
a strange choice. That came out of a natural aversion both to nested bundles
and to duplicating `Transform` etc. in bundles, and from figuring that it is
likely to just be tacked onto some other bundle (player, head, camera) most
of the time, as in the sketch below.
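
For example, a rough sketch of tacking it onto a camera entity (`spawn_listener_camera` is a hypothetical system name, and the gap just matches the default):

```rust
use bevy::prelude::*;

fn spawn_listener_camera(mut commands: Commands) {
    commands.spawn((
        // Camera3dBundle already carries Transform/GlobalTransform,
        // which SpatialListener needs.
        Camera3dBundle {
            transform: Transform::from_xyz(0.0, 5.0, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
            ..default()
        },
        // Ears sit on the camera's local x axis, 4.0 units apart (the default gap).
        SpatialListener::new(4.0),
    ));
}
```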

Let me know what you think about these things / everything else.

---------

Co-authored-by: Mike <mike.hsu@gmail.com>


@ -3,7 +3,6 @@ use bevy_asset::{Asset, Handle};
use bevy_derive::{Deref, DerefMut};
use bevy_ecs::prelude::*;
use bevy_math::Vec3;
use bevy_transform::prelude::Transform;
/// Defines the volume to play an audio source at.
#[derive(Clone, Copy, Debug)]
@ -82,6 +81,13 @@ pub struct PlaybackSettings {
/// Useful for "deferred playback", if you want to prepare
/// the entity, but hear the sound later.
pub paused: bool,
/// Enables spatial audio for this source.
///
/// See also: [`SpatialListener`].
///
/// Note: Bevy does not currently support HRTF or any other high-quality 3D sound rendering
/// features. Spatial audio is implemented via simple left-right stereo panning.
pub spatial: bool,
}
impl Default for PlaybackSettings {
@ -98,6 +104,7 @@ impl PlaybackSettings {
volume: Volume::Relative(VolumeLevel(1.0)),
speed: 1.0,
paused: false,
spatial: false,
};
/// Will play the associated audio source in a loop.
@ -106,6 +113,7 @@ impl PlaybackSettings {
volume: Volume::Relative(VolumeLevel(1.0)),
speed: 1.0,
paused: false,
spatial: false,
};
/// Will play the associated audio source once and despawn the entity afterwards.
@ -114,6 +122,7 @@ impl PlaybackSettings {
volume: Volume::Relative(VolumeLevel(1.0)),
speed: 1.0,
paused: false,
spatial: false,
};
/// Will play the associated audio source once and remove the audio components afterwards.
@ -122,6 +131,7 @@ impl PlaybackSettings {
volume: Volume::Relative(VolumeLevel(1.0)),
speed: 1.0,
paused: false,
spatial: false,
};
/// Helper to start in a paused state.
@ -141,30 +151,41 @@ impl PlaybackSettings {
self.speed = speed;
self
}
/// Helper to enable or disable spatial audio.
pub const fn with_spatial(mut self, spatial: bool) -> Self {
self.spatial = spatial;
self
}
}
/// Settings for playing spatial audio.
/// Settings for the listener for spatial audio sources.
///
/// Note: Bevy does not currently support HRTF or any other high-quality 3D sound rendering
/// features. Spatial audio is implemented via simple left-right stereo panning.
/// This must be accompanied by `Transform` and `GlobalTransform`.
/// Only one entity with a `SpatialListener` should be present at any given time.
#[derive(Component, Clone, Debug)]
pub struct SpatialSettings {
pub(crate) left_ear: [f32; 3],
pub(crate) right_ear: [f32; 3],
pub(crate) emitter: [f32; 3],
pub struct SpatialListener {
/// Left ear position relative to the `GlobalTransform`.
pub left_ear_offset: Vec3,
/// Right ear position relative to the `GlobalTransform`.
pub right_ear_offset: Vec3,
}
impl SpatialSettings {
/// Configure spatial audio coming from the `emitter` position and heard by a `listener`.
impl Default for SpatialListener {
fn default() -> Self {
Self::new(4.)
}
}
impl SpatialListener {
/// Creates a new `SpatialListener` component.
///
/// The `listener` transform provides the position and rotation where the sound is to be
/// heard from. `gap` is the distance between the left and right "ears" of the listener.
/// `emitter` is the position where the sound comes from.
pub fn new(listener: Transform, gap: f32, emitter: Vec3) -> Self {
SpatialSettings {
left_ear: (listener.translation + listener.left() * gap / 2.0).to_array(),
right_ear: (listener.translation + listener.right() * gap / 2.0).to_array(),
emitter: emitter.to_array(),
/// `gap` is the distance between the left and right "ears" of the listener. Ears are
/// positioned on the x axis.
pub fn new(gap: f32) -> Self {
SpatialListener {
left_ear_offset: Vec3::X * gap / -2.0,
right_ear_offset: Vec3::X * gap / 2.0,
}
}
}
@ -187,12 +208,37 @@ impl GlobalVolume {
}
}
/// The scale factor applied to the positions of audio sources and listeners for
/// spatial audio.
///
/// You may need to adjust this scale to fit your world's units.
///
/// Default is `Vec3::ONE`.
#[derive(Resource, Clone, Copy)]
pub struct SpatialScale(pub Vec3);
impl SpatialScale {
/// Create a new `SpatialScale` with the same value for all 3 dimensions.
pub fn new(scale: f32) -> Self {
Self(Vec3::splat(scale))
}
/// Create a new `SpatialScale` with the same value for `x` and `y`, and `0.0`
/// for `z`.
pub fn new_2d(scale: f32) -> Self {
Self(Vec3::new(scale, scale, 0.0))
}
}
impl Default for SpatialScale {
fn default() -> Self {
Self(Vec3::ONE)
}
}
/// Bundle for playing a standard bevy audio asset
pub type AudioBundle = AudioSourceBundle<AudioSource>;
/// Bundle for playing a standard bevy audio asset with a 3D position
pub type SpatialAudioBundle = SpatialAudioSourceBundle<AudioSource>;
/// Bundle for playing a sound.
///
/// Insert this bundle onto an entity to trigger a sound source to begin playing.
@ -224,29 +270,3 @@ impl<T: Decodable + Asset> Default for AudioSourceBundle<T> {
}
}
}
/// Bundle for playing a sound with a 3D position.
///
/// Insert this bundle onto an entity to trigger a sound source to begin playing.
///
/// If the handle refers to an unavailable asset (such as if it has not finished loading yet),
/// the audio will not begin playing immediately. The audio will play when the asset is ready.
///
/// When Bevy begins the audio playback, a [`SpatialAudioSink`][crate::SpatialAudioSink]
/// component will be added to the entity. You can use that component to control the audio
/// settings during playback.
#[derive(Bundle)]
pub struct SpatialAudioSourceBundle<Source = AudioSource>
where
Source: Asset + Decodable,
{
/// Asset containing the audio data to play.
pub source: Handle<Source>,
/// Initial settings that the audio starts playing with.
/// If you would like to control the audio while it is playing,
/// query for the [`SpatialAudioSink`][crate::SpatialAudioSink] component.
/// Changes to this component will *not* be applied to already-playing audio.
pub settings: PlaybackSettings,
/// Spatial audio configuration. Specifies the positions of the source and listener.
pub spatial: SpatialSettings,
}


@ -1,9 +1,11 @@
use crate::{
AudioSourceBundle, Decodable, GlobalVolume, PlaybackMode, PlaybackSettings, SpatialAudioSink,
SpatialAudioSourceBundle, SpatialSettings, Volume,
SpatialListener, SpatialScale, Volume,
};
use bevy_asset::{Asset, Assets, Handle};
use bevy_ecs::prelude::*;
use bevy_ecs::{prelude::*, system::SystemParam};
use bevy_math::Vec3;
use bevy_transform::prelude::GlobalTransform;
use bevy_utils::tracing::warn;
use rodio::{OutputStream, OutputStreamHandle, Sink, Source, SpatialSink};
@ -51,11 +53,48 @@ pub struct PlaybackDespawnMarker;
#[derive(Component)]
pub struct PlaybackRemoveMarker;
#[derive(SystemParam)]
pub(crate) struct EarPositions<'w, 's> {
pub(crate) query: Query<'w, 's, (Entity, &'static GlobalTransform, &'static SpatialListener)>,
pub(crate) scale: Res<'w, SpatialScale>,
}
impl<'w, 's> EarPositions<'w, 's> {
/// Gets a set of transformed and scaled ear positions.
///
/// If there are no listeners, use the default values. If a user has added multiple
/// listeners for whatever reason, we will return the first value.
pub(crate) fn get(&self) -> (Vec3, Vec3) {
let (left_ear, right_ear) = self
.query
.iter()
.next()
.map(|(_, transform, settings)| {
(
transform.transform_point(settings.left_ear_offset) * self.scale.0,
transform.transform_point(settings.right_ear_offset) * self.scale.0,
)
})
.unwrap_or_else(|| {
let settings = SpatialListener::default();
(
(settings.left_ear_offset * self.scale.0),
(settings.right_ear_offset * self.scale.0),
)
});
(left_ear, right_ear)
}
pub(crate) fn multiple_listeners(&self) -> bool {
self.query.iter().len() > 1
}
}
/// Plays "queued" audio through the [`AudioOutput`] resource.
///
/// "Queued" audio is any audio entity (with the components from
/// [`AudioBundle`][crate::AudioBundle] or [`SpatialAudioBundle`][crate::SpatialAudioBundle])
/// that does not have an [`AudioSink`]/[`SpatialAudioSink`] component.
/// [`AudioBundle`][crate::AudioBundle] that does not have an
/// [`AudioSink`]/[`SpatialAudioSink`] component.
///
/// This system detects such entities, checks if their source asset
/// data is available, and creates/inserts the sink.
@ -68,10 +107,11 @@ pub(crate) fn play_queued_audio_system<Source: Asset + Decodable>(
Entity,
&Handle<Source>,
&PlaybackSettings,
Option<&SpatialSettings>,
Option<&GlobalTransform>,
),
(Without<AudioSink>, Without<SpatialAudioSink>),
>,
ear_positions: EarPositions,
mut commands: Commands,
) where
f32: rodio::cpal::FromSample<Source::DecoderItem>,
@ -81,15 +121,33 @@ pub(crate) fn play_queued_audio_system<Source: Asset + Decodable>(
return;
};
for (entity, source_handle, settings, spatial) in &query_nonplaying {
for (entity, source_handle, settings, maybe_emitter_transform) in &query_nonplaying {
if let Some(audio_source) = audio_sources.get(source_handle) {
// audio data is available (has loaded), begin playback and insert sink component
if let Some(spatial) = spatial {
if settings.spatial {
let (left_ear, right_ear) = ear_positions.get();
// We can only use one `SpatialListener`. If there are more than that, then
// the user may have made a mistake.
if ear_positions.multiple_listeners() {
warn!(
"Multiple SpatialListeners found. Using {:?}.",
ear_positions.query.iter().next().unwrap().0
);
}
let emitter_translation = maybe_emitter_transform
.map(|t| (t.translation() * ear_positions.scale.0).into())
.unwrap_or_else(|| {
warn!("Spatial AudioBundle with no GlobalTransform component. Using zero.");
Vec3::ZERO.into()
});
match SpatialSink::try_new(
stream_handle,
spatial.emitter,
spatial.left_ear,
spatial.right_ear,
emitter_translation,
left_ear.into(),
right_ear.into(),
) {
Ok(sink) => {
sink.set_speed(settings.speed);
@ -216,11 +274,9 @@ pub(crate) fn cleanup_finished_audio<T: Decodable + Asset>(
}
for (entity, sink) in &query_spatial_remove {
if sink.sink.empty() {
commands.entity(entity).remove::<(
SpatialAudioSourceBundle<T>,
SpatialAudioSink,
PlaybackRemoveMarker,
)>();
commands
.entity(entity)
.remove::<(AudioSourceBundle<T>, SpatialAudioSink, PlaybackRemoveMarker)>();
}
}
}
@ -229,3 +285,37 @@ pub(crate) fn cleanup_finished_audio<T: Decodable + Asset>(
pub(crate) fn audio_output_available(audio_output: Res<AudioOutput>) -> bool {
audio_output.stream_handle.is_some()
}
/// Updates spatial audio sinks when emitter positions change.
pub(crate) fn update_emitter_positions(
mut emitters: Query<(&mut GlobalTransform, &SpatialAudioSink), Changed<GlobalTransform>>,
spatial_scale: Res<SpatialScale>,
) {
for (transform, sink) in emitters.iter_mut() {
let translation = transform.translation() * spatial_scale.0;
sink.set_emitter_position(translation);
}
}
/// Updates spatial audio sink ear positions when spatial listeners change.
pub(crate) fn update_listener_positions(
mut emitters: Query<&SpatialAudioSink>,
changed_listener: Query<
(),
(
Or<(Changed<SpatialListener>, Changed<GlobalTransform>)>,
With<SpatialListener>,
),
>,
ear_positions: EarPositions,
) {
if !ear_positions.scale.is_changed() && changed_listener.is_empty() {
return;
}
let (left_ear, right_ear) = ear_positions.get();
for sink in emitters.iter_mut() {
sink.set_ears_position(left_ear, right_ear);
}
}


@ -35,8 +35,7 @@ pub mod prelude {
#[doc(hidden)]
pub use crate::{
AudioBundle, AudioSink, AudioSinkPlayback, AudioSource, AudioSourceBundle, Decodable,
GlobalVolume, Pitch, PitchBundle, PlaybackSettings, SpatialAudioBundle, SpatialAudioSink,
SpatialAudioSourceBundle, SpatialPitchBundle, SpatialSettings,
GlobalVolume, Pitch, PitchBundle, PlaybackSettings, SpatialAudioSink, SpatialListener,
};
}
@ -52,6 +51,7 @@ pub use sinks::*;
use bevy_app::prelude::*;
use bevy_asset::{Asset, AssetApp};
use bevy_ecs::prelude::*;
use bevy_transform::TransformSystem;
use audio_output::*;
@ -61,17 +61,26 @@ struct AudioPlaySet;
/// Adds support for audio playback to a Bevy Application
///
/// Insert an [`AudioBundle`] or [`SpatialAudioBundle`] onto your entities to play audio.
/// Insert an [`AudioBundle`] onto your entities to play audio.
#[derive(Default)]
pub struct AudioPlugin {
/// The global volume for all audio entities with a [`Volume::Relative`] volume.
pub global_volume: GlobalVolume,
/// The scale factor applied to the positions of audio sources and listeners for
/// spatial audio.
pub spatial_scale: SpatialScale,
}
impl Plugin for AudioPlugin {
fn build(&self, app: &mut App) {
app.insert_resource(self.global_volume)
.configure_sets(PostUpdate, AudioPlaySet.run_if(audio_output_available))
.insert_resource(self.spatial_scale)
.configure_sets(
PostUpdate,
AudioPlaySet
.run_if(audio_output_available)
.after(TransformSystem::TransformPropagate), // For spatial audio transforms
)
.init_resource::<AudioOutput>();
#[cfg(any(feature = "mp3", feature = "flac", feature = "wav", feature = "vorbis"))]
@ -95,6 +104,8 @@ impl AddAudioSource for App {
play_queued_audio_system::<T>.in_set(AudioPlaySet),
);
self.add_systems(PostUpdate, cleanup_finished_audio::<T>.in_set(AudioPlaySet));
self.add_systems(PostUpdate, update_emitter_positions.in_set(AudioPlaySet));
self.add_systems(PostUpdate, update_listener_positions.in_set(AudioPlaySet));
self
}
}


@ -1,4 +1,4 @@
use crate::{AudioSourceBundle, Decodable, SpatialAudioSourceBundle};
use crate::{AudioSourceBundle, Decodable};
use bevy_asset::Asset;
use bevy_reflect::TypePath;
use rodio::{source::SineWave, source::TakeDuration, Source};
@ -33,6 +33,3 @@ impl Decodable for Pitch {
/// Bundle for playing a bevy note sound
pub type PitchBundle = AudioSourceBundle<Pitch>;
/// Bundle for playing a bevy note sound with a 3D position
pub type SpatialPitchBundle = SpatialAudioSourceBundle<Pitch>;


@ -128,8 +128,8 @@ impl AudioSinkPlayback for AudioSink {
/// Used to control spatial audio during playback.
///
/// Bevy inserts this component onto your entities when it begins playing an audio source.
/// Use [`SpatialAudioBundle`][crate::SpatialAudioBundle] to trigger that to happen.
/// Bevy inserts this component onto your entities when it begins playing an audio source
/// that's configured to use spatial audio.
///
/// You can use this component to modify the playback settings while the audio is playing.
///


@ -86,6 +86,7 @@ fn main() {
// register the audio source so that it can be used
app.add_plugins(DefaultPlugins.set(AudioPlugin {
global_volume: GlobalVolume::new(0.2),
..default()
}))
.add_audio_source::<SineAudio>()
.add_systems(Startup, setup)


@ -1,18 +1,27 @@
//! This example illustrates how to load and play an audio file, and control where the sounds seems to come from.
use bevy::{prelude::*, sprite::MaterialMesh2dBundle};
use bevy::{
audio::{AudioPlugin, SpatialScale},
prelude::*,
sprite::MaterialMesh2dBundle,
};
/// Spatial audio uses the distance to attenuate the sound volume. In 2D with the default camera,
/// 1 pixel is 1 unit of distance, so we use a scale so that 100 pixels is 1 unit of distance for
/// audio.
const AUDIO_SCALE: f32 = 1. / 100.0;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_plugins(DefaultPlugins.set(AudioPlugin {
spatial_scale: SpatialScale::new_2d(AUDIO_SCALE),
..default()
}))
.add_systems(Startup, setup)
.add_systems(Update, update_positions)
.add_systems(Update, update_emitters)
.add_systems(Update, update_listener)
.run();
}
/// Spatial audio uses the distance to attenuate the sound volume. In 2D with the default camera, 1 pixel is 1 unit of distance,
/// so we use a scale so that 100 pixels is 1 unit of distance for audio.
const AUDIO_SCALE: f32 = 100.0;
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
@ -30,51 +39,101 @@ fn setup(
transform: Transform::from_translation(Vec3::new(0.0, 50.0, 0.0)),
..default()
},
Emitter,
SpatialAudioBundle {
Emitter::default(),
AudioBundle {
source: asset_server.load("sounds/Windless Slopes.ogg"),
settings: PlaybackSettings::LOOP,
spatial: SpatialSettings::new(Transform::IDENTITY, gap / AUDIO_SCALE, Vec3::ZERO),
settings: PlaybackSettings::LOOP.with_spatial(true),
},
));
// left ear
commands.spawn(SpriteBundle {
sprite: Sprite {
color: Color::RED,
custom_size: Some(Vec2::splat(20.0)),
..default()
},
transform: Transform::from_xyz(-gap / 2.0, 0.0, 0.0),
..default()
});
let listener = SpatialListener::new(gap);
commands
.spawn((SpatialBundle::default(), listener.clone()))
.with_children(|parent| {
// left ear
parent.spawn(SpriteBundle {
sprite: Sprite {
color: Color::RED,
custom_size: Some(Vec2::splat(20.0)),
..default()
},
transform: Transform::from_xyz(-gap / 2.0, 0.0, 0.0),
..default()
});
// right ear
commands.spawn(SpriteBundle {
sprite: Sprite {
color: Color::GREEN,
custom_size: Some(Vec2::splat(20.0)),
// right ear
parent.spawn(SpriteBundle {
sprite: Sprite {
color: Color::GREEN,
custom_size: Some(Vec2::splat(20.0)),
..default()
},
transform: Transform::from_xyz(gap / 2.0, 0.0, 0.0),
..default()
});
});
// example instructions
commands.spawn(
TextBundle::from_section(
"Up/Down/Left/Right: Move Listener\nSpace: Toggle Emitter Movement",
TextStyle {
font_size: 20.0,
..default()
},
)
.with_style(Style {
position_type: PositionType::Absolute,
bottom: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
transform: Transform::from_xyz(gap / 2.0, 0.0, 0.0),
..default()
});
}),
);
// camera
commands.spawn(Camera2dBundle::default());
}
#[derive(Component)]
struct Emitter;
#[derive(Component, Default)]
struct Emitter {
stopped: bool,
}
fn update_positions(
fn update_emitters(
time: Res<Time>,
mut emitters: Query<(&mut Transform, Option<&SpatialAudioSink>), With<Emitter>>,
mut emitters: Query<(&mut Transform, &mut Emitter), With<Emitter>>,
keyboard: Res<Input<KeyCode>>,
) {
for (mut emitter_transform, sink) in emitters.iter_mut() {
emitter_transform.translation.x = time.elapsed_seconds().sin() * 500.0;
if let Some(sink) = &sink {
sink.set_emitter_position(emitter_transform.translation / AUDIO_SCALE);
for (mut emitter_transform, mut emitter) in emitters.iter_mut() {
if keyboard.just_pressed(KeyCode::Space) {
emitter.stopped = !emitter.stopped;
}
if !emitter.stopped {
emitter_transform.translation.x = time.elapsed_seconds().sin() * 500.0;
}
}
}
fn update_listener(
keyboard: Res<Input<KeyCode>>,
time: Res<Time>,
mut listeners: Query<&mut Transform, With<SpatialListener>>,
) {
let mut transform = listeners.single_mut();
let speed = 200.;
if keyboard.pressed(KeyCode::Right) {
transform.translation.x += speed * time.delta_seconds();
}
if keyboard.pressed(KeyCode::Left) {
transform.translation.x -= speed * time.delta_seconds();
}
if keyboard.pressed(KeyCode::Up) {
transform.translation.y += speed * time.delta_seconds();
}
if keyboard.pressed(KeyCode::Down) {
transform.translation.y -= speed * time.delta_seconds();
}
}


@ -6,6 +6,7 @@ fn main() {
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, update_positions)
.add_systems(Update, update_listener)
.run();
}
@ -29,29 +30,33 @@ fn setup(
transform: Transform::from_xyz(0.0, 0.0, 0.0),
..default()
},
Emitter,
SpatialAudioBundle {
Emitter::default(),
AudioBundle {
source: asset_server.load("sounds/Windless Slopes.ogg"),
settings: PlaybackSettings::LOOP,
spatial: SpatialSettings::new(Transform::IDENTITY, gap, Vec3::ZERO),
settings: PlaybackSettings::LOOP.with_spatial(true),
},
));
// left ear
commands.spawn(PbrBundle {
mesh: meshes.add(Mesh::from(shape::Cube { size: 0.2 })),
material: materials.add(Color::RED.into()),
transform: Transform::from_xyz(-gap / 2.0, 0.0, 0.0),
..default()
});
let listener = SpatialListener::new(gap);
commands
.spawn((SpatialBundle::default(), listener.clone()))
.with_children(|parent| {
// left ear indicator
parent.spawn(PbrBundle {
mesh: meshes.add(Mesh::from(shape::Cube { size: 0.2 })),
material: materials.add(Color::RED.into()),
transform: Transform::from_translation(listener.left_ear_offset),
..default()
});
// right ear
commands.spawn(PbrBundle {
mesh: meshes.add(Mesh::from(shape::Cube { size: 0.2 })),
material: materials.add(Color::GREEN.into()),
transform: Transform::from_xyz(gap / 2.0, 0.0, 0.0),
..default()
});
// right ear indicator
parent.spawn(PbrBundle {
mesh: meshes.add(Mesh::from(shape::Cube { size: 0.2 })),
material: materials.add(Color::GREEN.into()),
transform: Transform::from_translation(listener.right_ear_offset),
..default()
});
});
// light
commands.spawn(PointLightBundle {
@ -63,6 +68,24 @@ fn setup(
transform: Transform::from_xyz(4.0, 8.0, 4.0),
..default()
});
// example instructions
commands.spawn(
TextBundle::from_section(
"Up/Down/Left/Right: Move Listener\nSpace: Toggle Emitter Movement",
TextStyle {
font_size: 20.0,
..default()
},
)
.with_style(Style {
position_type: PositionType::Absolute,
bottom: Val::Px(12.0),
left: Val::Px(12.0),
..default()
}),
);
// camera
commands.spawn(Camera3dBundle {
transform: Transform::from_xyz(0.0, 5.0, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
@ -70,18 +93,47 @@ fn setup(
});
}
#[derive(Component)]
struct Emitter;
#[derive(Component, Default)]
struct Emitter {
stopped: bool,
}
fn update_positions(
time: Res<Time>,
mut emitters: Query<(&mut Transform, Option<&SpatialAudioSink>), With<Emitter>>,
mut emitters: Query<(&mut Transform, &mut Emitter), With<Emitter>>,
keyboard: Res<Input<KeyCode>>,
) {
for (mut emitter_transform, sink) in emitters.iter_mut() {
emitter_transform.translation.x = time.elapsed_seconds().sin() * 3.0;
emitter_transform.translation.z = time.elapsed_seconds().cos() * 3.0;
if let Some(sink) = &sink {
sink.set_emitter_position(emitter_transform.translation);
for (mut emitter_transform, mut emitter) in emitters.iter_mut() {
if keyboard.just_pressed(KeyCode::Space) {
emitter.stopped = !emitter.stopped;
}
if !emitter.stopped {
emitter_transform.translation.x = time.elapsed_seconds().sin() * 3.0;
emitter_transform.translation.z = time.elapsed_seconds().cos() * 3.0;
}
}
}
fn update_listener(
keyboard: Res<Input<KeyCode>>,
time: Res<Time>,
mut listeners: Query<&mut Transform, With<SpatialListener>>,
) {
let mut transform = listeners.single_mut();
let speed = 2.;
if keyboard.pressed(KeyCode::Right) {
transform.translation.x += speed * time.delta_seconds();
}
if keyboard.pressed(KeyCode::Left) {
transform.translation.x -= speed * time.delta_seconds();
}
if keyboard.pressed(KeyCode::Down) {
transform.translation.z += speed * time.delta_seconds();
}
if keyboard.pressed(KeyCode::Up) {
transform.translation.z -= speed * time.delta_seconds();
}
}