Track spatial one-shots against listener movement

This commit is contained in:
Jage9
2026-02-22 21:37:15 -05:00
parent 9b1b1505f0
commit f3a7cc90a7
3 changed files with 77 additions and 17 deletions

View File

@@ -1,5 +1,5 @@
// Maintainer-controlled web client version. // Maintainer-controlled web client version.
// Format: YYYY.MM.DD Rn (example: 2026.02.20 R2) // Format: YYYY.MM.DD Rn (example: 2026.02.20 R2)
window.CHGRID_WEB_VERSION = "2026.02.22 R193"; window.CHGRID_WEB_VERSION = "2026.02.22 R194";
// Optional display timezone for timestamps. Falls back to America/Detroit if unset/invalid. // Optional display timezone for timestamps. Falls back to America/Detroit if unset/invalid.
window.CHGRID_TIME_ZONE = "America/Detroit"; window.CHGRID_TIME_ZONE = "America/Detroit";

View File

@@ -32,6 +32,14 @@ type OutputMode = 'stereo' | 'mono';
const SPATIAL_RAMP_SECONDS = 0.2; const SPATIAL_RAMP_SECONDS = 0.2;
const SPATIAL_TIME_CONSTANT_SECONDS = SPATIAL_RAMP_SECONDS / 3; const SPATIAL_TIME_CONSTANT_SECONDS = SPATIAL_RAMP_SECONDS / 3;
const ONE_SHOT_ATTACK_SECONDS = 0.02; const ONE_SHOT_ATTACK_SECONDS = 0.02;
// Book-keeping for a one-shot spatial sample that is still playing, so its
// gain/pan can be re-resolved against the listener position while it plays.
type ActiveSpatialSampleRuntime = {
// World-space coordinates the sample is emitted from.
sourceX: number;
sourceY: number;
// Caller-supplied gain before distance attenuation (resolveSpatialMix) is applied.
baseGain: number;
// Per-sample gain node; its value is retargeted as the listener moves.
gainNode: GainNode;
// Null when StereoPannerNode is unsupported or output mode is mono at creation time.
pannerNode: StereoPannerNode | null;
// Kept so the onended cleanup can disconnect the source from the graph.
sourceNode: AudioBufferSourceNode;
};
export class AudioEngine { export class AudioEngine {
private audioCtx: AudioContext | null = null; private audioCtx: AudioContext | null = null;
@@ -39,6 +47,7 @@ export class AudioEngine {
private sfxGainNode: GainNode | null = null; private sfxGainNode: GainNode | null = null;
private readonly sampleCache = new Map<string, AudioBuffer>(); private readonly sampleCache = new Map<string, AudioBuffer>();
private readonly sampleLoaders = new Map<string, Promise<AudioBuffer>>(); private readonly sampleLoaders = new Map<string, Promise<AudioBuffer>>();
private readonly activeSpatialSamples = new Set<ActiveSpatialSampleRuntime>();
private outboundSource: MediaStreamAudioSourceNode | null = null; private outboundSource: MediaStreamAudioSourceNode | null = null;
private outboundInputGain: GainNode | null = null; private outboundInputGain: GainNode | null = null;
@@ -311,6 +320,14 @@ export class AudioEngine {
} }
} }
/**
 * Re-resolves gain/pan for every in-flight one-shot spatial sample so it
 * tracks the listener's current position. Called once per game-loop tick.
 */
updateSpatialSamples(playerPosition: { x: number; y: number }): void {
if (!this.audioCtx) return;
// Snapshot first: onended handlers may remove entries while we iterate.
const snapshot = [...this.activeSpatialSamples];
for (const runtime of snapshot) {
this.applySpatialSampleRuntime(runtime, playerPosition);
}
}
sfxLocate(peer: { x: number; y: number }): void { sfxLocate(peer: { x: number; y: number }): void {
this.playSound({ freq: 880, duration: 0.2, type: 'sine', gain: 0.5, sourcePosition: peer }); this.playSound({ freq: 880, duration: 0.2, type: 'sine', gain: 0.5, sourcePosition: peer });
} }
@@ -339,34 +356,50 @@ export class AudioEngine {
this.playSound({ freq: 880, duration: 0.12, type: 'sine', gain: 0.45 }); this.playSound({ freq: 880, duration: 0.12, type: 'sine', gain: 0.45 });
} }
async playSpatialSample(url: string, sourcePosition: { x: number; y: number }, gain = 1): Promise<void> { async playSpatialSample(
url: string,
sourcePosition: { x: number; y: number },
playerPosition: { x: number; y: number },
gain = 1,
): Promise<void> {
await this.ensureContext(); await this.ensureContext();
const { audioCtx, sfxGainNode } = this; const { audioCtx, sfxGainNode } = this;
if (!audioCtx || !sfxGainNode) return; if (!audioCtx || !sfxGainNode) return;
const resolved = resolveSpatialMix({
dx: sourcePosition.x,
dy: sourcePosition.y,
range: HEARING_RADIUS,
baseGain: gain,
});
if (!resolved) return;
try { try {
const buffer = await this.getSampleBuffer(url); const buffer = await this.getSampleBuffer(url);
const source = audioCtx.createBufferSource(); const source = audioCtx.createBufferSource();
source.buffer = buffer; source.buffer = buffer;
const gainNode = audioCtx.createGain(); const gainNode = audioCtx.createGain();
gainNode.gain.setValueAtTime(0, audioCtx.currentTime); gainNode.gain.setValueAtTime(0, audioCtx.currentTime);
gainNode.gain.setTargetAtTime(resolved.gain, audioCtx.currentTime, ONE_SHOT_ATTACK_SECONDS);
source.connect(gainNode); source.connect(gainNode);
if (resolved.pan !== undefined && this.supportsStereoPanner() && this.outputMode === 'stereo') { let pannerNode: StereoPannerNode | null = null;
const panner = audioCtx.createStereoPanner(); if (this.supportsStereoPanner() && this.outputMode === 'stereo') {
panner.pan.setValueAtTime(resolved.pan, audioCtx.currentTime); pannerNode = audioCtx.createStereoPanner();
gainNode.connect(panner).connect(sfxGainNode); gainNode.connect(pannerNode).connect(sfxGainNode);
} else { } else {
gainNode.connect(sfxGainNode); gainNode.connect(sfxGainNode);
} }
const runtime: ActiveSpatialSampleRuntime = {
sourceX: sourcePosition.x,
sourceY: sourcePosition.y,
baseGain: gain,
gainNode,
pannerNode,
sourceNode: source,
};
this.activeSpatialSamples.add(runtime);
this.applySpatialSampleRuntime(runtime, playerPosition, true);
source.onended = () => {
this.activeSpatialSamples.delete(runtime);
try {
source.disconnect();
} catch {
// Ignore stale graph disconnects.
}
gainNode.disconnect();
pannerNode?.disconnect();
};
source.start(); source.start();
} catch { } catch {
// Ignore sample decode/load errors. // Ignore sample decode/load errors.
@@ -523,6 +556,31 @@ export class AudioEngine {
oscillator.stop(startTime + spec.duration); oscillator.stop(startTime + spec.duration);
} }
private applySpatialSampleRuntime(
sample: ActiveSpatialSampleRuntime,
playerPosition: { x: number; y: number },
initial = false,
): void {
if (!this.audioCtx) return;
const mix = resolveSpatialMix({
dx: sample.sourceX - playerPosition.x,
dy: sample.sourceY - playerPosition.y,
range: HEARING_RADIUS,
baseGain: sample.baseGain,
});
const gainValue = mix?.gain ?? 0;
if (initial) {
sample.gainNode.gain.setTargetAtTime(gainValue, this.audioCtx.currentTime, ONE_SHOT_ATTACK_SECONDS);
} else {
sample.gainNode.gain.setTargetAtTime(gainValue, this.audioCtx.currentTime, SPATIAL_TIME_CONSTANT_SECONDS);
}
if (sample.pannerNode) {
const panValue = mix?.pan ?? 0;
const resolvedPan = this.outputMode === 'mono' ? 0 : Math.max(-1, Math.min(1, panValue));
sample.pannerNode.pan.setTargetAtTime(resolvedPan, this.audioCtx.currentTime, SPATIAL_TIME_CONSTANT_SECONDS);
}
}
private async getSampleBuffer(url: string): Promise<AudioBuffer> { private async getSampleBuffer(url: string): Promise<AudioBuffer> {
if (!this.audioCtx) { if (!this.audioCtx) {
throw new Error('Audio context not initialized'); throw new Error('Audio context not initialized');

View File

@@ -1212,6 +1212,7 @@ function gameLoop(): void {
void refreshAudioSubscriptions(); void refreshAudioSubscriptions();
} }
audio.updateSpatialAudio(peerManager.getPeers(), { x: state.player.x, y: state.player.y }); audio.updateSpatialAudio(peerManager.getPeers(), { x: state.player.x, y: state.player.y });
audio.updateSpatialSamples({ x: state.player.x, y: state.player.y });
radioRuntime.updateSpatialAudio(state.items, { x: state.player.x, y: state.player.y }); radioRuntime.updateSpatialAudio(state.items, { x: state.player.x, y: state.player.y });
itemEmitRuntime.updateSpatialAudio(state.items, { x: state.player.x, y: state.player.y }); itemEmitRuntime.updateSpatialAudio(state.items, { x: state.player.x, y: state.player.y });
state.cursorVisible = Math.floor(Date.now() / 500) % 2 === 0; state.cursorVisible = Math.floor(Date.now() / 500) % 2 === 0;
@@ -1470,7 +1471,8 @@ const onAppMessage = createOnMessageHandler({
const gain = url === TELEPORT_START_SOUND_URL ? TELEPORT_START_GAIN : FOOTSTEP_GAIN; const gain = url === TELEPORT_START_SOUND_URL ? TELEPORT_START_GAIN : FOOTSTEP_GAIN;
void audio.playSpatialSample( void audio.playSpatialSample(
url, url,
{ x: peerX - state.player.x, y: peerY - state.player.y }, { x: peerX, y: peerY },
{ x: state.player.x, y: state.player.y },
gain, gain,
); );
}, },
@@ -1496,7 +1498,7 @@ const onAppMessage = createOnMessageHandler({
playLocateToneAt: (x, y) => audio.sfxLocate({ x: x - state.player.x, y: y - state.player.y }), playLocateToneAt: (x, y) => audio.sfxLocate({ x: x - state.player.x, y: y - state.player.y }),
resolveIncomingSoundUrl, resolveIncomingSoundUrl,
playIncomingItemUseSound: (url, x, y) => { playIncomingItemUseSound: (url, x, y) => {
void audio.playSpatialSample(url, { x: x - state.player.x, y: y - state.player.y }, 1); void audio.playSpatialSample(url, { x, y }, { x: state.player.x, y: state.player.y }, 1);
}, },
}); });