Add audio layer toggles and reduce item emit volume

This commit is contained in:
Jage9
2026-02-21 16:30:31 -05:00
parent 3a64f7d38c
commit e0fc98d3f1
6 changed files with 169 additions and 16 deletions

View File

@@ -41,6 +41,7 @@ export class AudioEngine {
private loopbackEnabled = false;
private loopbackRuntime: EffectRuntime | null = null;
private outputMode: OutputMode = 'stereo';
private voiceLayerEnabled = true;
private effectIndex = EFFECT_SEQUENCE.findIndex((effect) => effect.id === 'off');
private readonly effectValues: Record<EffectId, number> = {
reverb: 50,
@@ -173,6 +174,14 @@ export class AudioEngine {
return this.outputMode;
}
/** Records whether incoming peer voice audio should be routed to the output graph. */
setVoiceLayerEnabled(enabled: boolean): void {
this.voiceLayerEnabled = enabled;
}
/** Whether the voice layer is currently enabled (consulted when attaching remote streams). */
isVoiceLayerEnabled(): boolean {
return this.voiceLayerEnabled;
}
toggleLoopback(): boolean {
this.loopbackEnabled = !this.loopbackEnabled;
this.rebuildOutboundEffectGraph();
@@ -186,6 +195,7 @@ export class AudioEngine {
): Promise<void> {
await this.ensureContext();
if (!this.audioCtx) return;
this.cleanupPeerAudio(peer);
const audioElement = new Audio();
audioElement.srcObject = stream;
@@ -206,9 +216,13 @@ export class AudioEngine {
let pannerNode: StereoPannerNode | undefined;
if (this.supportsStereoPanner()) {
pannerNode = this.audioCtx.createStereoPanner();
gainNode.connect(pannerNode).connect(this.audioCtx.destination);
if (this.voiceLayerEnabled) {
gainNode.connect(pannerNode).connect(this.audioCtx.destination);
}
} else {
gainNode.connect(this.audioCtx.destination);
if (this.voiceLayerEnabled) {
gainNode.connect(this.audioCtx.destination);
}
}
peer.audioElement = audioElement;
@@ -313,9 +327,16 @@ export class AudioEngine {
}
/**
 * Tears down all audio resources tied to a remote peer: stops the media
 * element, detaches its stream, removes it from the DOM, and disconnects
 * the Web Audio nodes.
 *
 * Safe to call repeatedly (it is invoked before every re-attach and when
 * the voice layer is suspended); every step is guarded against fields
 * that are already cleared.
 */
cleanupPeerAudio(peer: SpatialPeerRuntime): void {
if (peer.audioElement) {
// Pause and drop srcObject before removal so the element releases the
// MediaStream instead of keeping playback alive off-DOM.
peer.audioElement.pause();
peer.audioElement.srcObject = null;
peer.audioElement.remove();
}
peer.gain?.disconnect();
peer.panner?.disconnect();
// Clear references so a later attachRemoteStream starts from a clean slate.
peer.audioElement = undefined;
peer.gain = undefined;
peer.panner = undefined;
}
private rebuildOutboundEffectGraph(): void {

View File

@@ -9,8 +9,11 @@ type EmitOutput = {
panner: StereoPannerNode | null;
};
// Global attenuation multiplied into every item-emit distance ramp (see
// updateSpatialAudio) so emitter sounds play quieter than full scale.
const ITEM_EMIT_BASE_GAIN = 0.3;
export class ItemEmitRuntime {
private readonly outputs = new Map<string, EmitOutput>();
private layerEnabled = true;
constructor(
private readonly audio: AudioEngine,
@@ -34,7 +37,20 @@ export class ItemEmitRuntime {
}
}
/**
 * Turns the item-emit audio layer on or off. Disabling tears down all
 * current outputs; enabling re-syncs outputs against the given items.
 */
async setLayerEnabled(enabled: boolean, items: Iterable<WorldItem>): Promise<void> {
this.layerEnabled = enabled;
if (enabled) {
await this.sync(items);
} else {
this.cleanupAll();
}
}
async sync(items: Iterable<WorldItem>): Promise<void> {
if (!this.layerEnabled) {
this.cleanupAll();
return;
}
const validIds = new Set<string>();
await this.audio.ensureContext();
const audioCtx = this.audio.context;
@@ -81,6 +97,7 @@ export class ItemEmitRuntime {
}
updateSpatialAudio(items: Map<string, WorldItem>, playerPosition: { x: number; y: number }): void {
if (!this.layerEnabled) return;
const audioCtx = this.audio.context;
if (!audioCtx) return;
@@ -101,7 +118,7 @@ export class ItemEmitRuntime {
gainValue = 1;
panValue = 0;
}
output.gain.gain.linearRampToValueAtTime(gainValue, audioCtx.currentTime + 0.1);
output.gain.gain.linearRampToValueAtTime(gainValue * ITEM_EMIT_BASE_GAIN, audioCtx.currentTime + 0.1);
if (output.panner) {
const resolvedPan = this.audio.getOutputMode() === 'mono' ? 0 : Math.max(-1, Math.min(1, panValue));
output.panner.pan.linearRampToValueAtTime(resolvedPan, audioCtx.currentTime + 0.1);

View File

@@ -110,6 +110,7 @@ function connectRadioChannelSource(
export class RadioStationRuntime {
private readonly sharedRadioSources = new Map<string, SharedRadioSource>();
private readonly itemRadioOutputs = new Map<string, ItemRadioOutput>();
private layerEnabled = true;
constructor(private readonly audio: AudioEngine) {}
@@ -148,7 +149,20 @@ export class RadioStationRuntime {
}
}
/**
 * Switches the media (radio station) layer. When disabled, all radio
 * outputs are torn down; when enabled, outputs are rebuilt from items.
 */
async setLayerEnabled(enabled: boolean, items: Iterable<WorldItem>): Promise<void> {
this.layerEnabled = enabled;
if (enabled) {
await this.sync(items);
} else {
this.cleanupAll();
}
}
async sync(items: Iterable<WorldItem>): Promise<void> {
if (!this.layerEnabled) {
this.cleanupAll();
return;
}
const validIds = new Set<string>();
for (const item of items) {
if (item.type !== 'radio_station') continue;
@@ -163,6 +177,7 @@ export class RadioStationRuntime {
}
updateSpatialAudio(items: Map<string, WorldItem>, playerPosition: { x: number; y: number }): void {
if (!this.layerEnabled) return;
const audioCtx = this.audio.context;
if (!audioCtx) return;
for (const [itemId, output] of this.itemRadioOutputs.entries()) {

View File

@@ -40,6 +40,7 @@ const AUDIO_OUTPUT_STORAGE_KEY = 'chatGridAudioOutputDeviceId';
const AUDIO_INPUT_NAME_STORAGE_KEY = 'chatGridAudioInputDeviceName';
const AUDIO_OUTPUT_NAME_STORAGE_KEY = 'chatGridAudioOutputDeviceName';
const AUDIO_OUTPUT_MODE_STORAGE_KEY = 'chatGridAudioOutputMode';
const AUDIO_LAYER_STATE_STORAGE_KEY = 'chatGridAudioLayers';
const DEFAULT_DISPLAY_TIME_ZONE = 'America/Detroit';
const NICKNAME_STORAGE_KEY = 'spatialChatNickname';
const NICKNAME_MAX_LENGTH = 32;
@@ -104,6 +105,13 @@ type ChangelogData = {
sections: ChangelogSection[];
};
// Per-layer audio enable flags, persisted to localStorage.
type AudioLayerState = {
voice: boolean; // remote peer voice streams
item: boolean; // item emitter sounds
media: boolean; // radio station playback
world: boolean; // one-shot world SFX (footsteps, item-use sounds)
};
const APP_VERSION = String(window.CHGRID_WEB_VERSION ?? '').trim();
const DISPLAY_TIME_ZONE = resolveDisplayTimeZone();
const CLOCK_TIME_ZONE_OPTIONS = [
@@ -214,6 +222,12 @@ const itemEmitRuntime = new ItemEmitRuntime(audio, resolveIncomingSoundUrl);
let internalClipboardText = '';
let replaceTextOnNextType = false;
let pendingEscapeDisconnect = false;
// Live layer-toggle state; every layer defaults to enabled until
// loadAudioLayerState() restores any persisted overrides.
let audioLayers: AudioLayerState = {
voice: true,
item: true,
media: true,
world: true,
};
const signalingProtocol = window.location.protocol === 'https:' ? 'wss' : 'ws';
const signalingUrl = `${signalingProtocol}://${window.location.host}/ws`;
@@ -230,6 +244,7 @@ const peerManager = new PeerManager(
audio.setOutputMode(outputMode);
loadEffectLevels();
loadAudioLayerState();
void loadChangelog();
function requiredById<T extends HTMLElement>(id: string): T {
@@ -378,6 +393,47 @@ function persistEffectLevels(): void {
localStorage.setItem(EFFECT_LEVELS_STORAGE_KEY, JSON.stringify(audio.getEffectLevels()));
}
/**
 * Restores the persisted audio-layer toggles from localStorage.
 * Missing or malformed entries fall back to "enabled", and the voice
 * layer's state is pushed into the audio engine either way.
 */
function loadAudioLayerState(): void {
const stored = localStorage.getItem(AUDIO_LAYER_STATE_STORAGE_KEY);
if (stored) {
try {
const saved = JSON.parse(stored) as Partial<AudioLayerState>;
// A layer is off only when it was explicitly persisted as false.
const isOn = (value: boolean | undefined): boolean => value !== false;
audioLayers = {
voice: isOn(saved.voice),
item: isOn(saved.item),
media: isOn(saved.media),
world: isOn(saved.world),
};
} catch {
// Ignore malformed persisted values.
}
}
audio.setVoiceLayerEnabled(audioLayers.voice);
}
/** Writes the current audio-layer toggles to localStorage. */
function persistAudioLayerState(): void {
const serialized = JSON.stringify(audioLayers);
localStorage.setItem(AUDIO_LAYER_STATE_STORAGE_KEY, serialized);
}
/**
 * Pushes the current audioLayers toggles into every runtime: voice
 * (peer audio attach/detach), media (radio stations), and item
 * (emitters). The world layer is consulted at playback time instead,
 * so there is nothing to apply for it here.
 */
async function applyAudioLayerState(): Promise<void> {
const { voice, media, item } = audioLayers;
audio.setVoiceLayerEnabled(voice);
if (!voice) {
peerManager.suspendRemoteAudio();
} else {
await peerManager.resumeRemoteAudio();
}
await radioRuntime.setLayerEnabled(media, state.items.values());
await itemEmitRuntime.setLayerEnabled(item, state.items.values());
}
/**
 * Flips one audio layer, persists the new state, applies it to the
 * runtimes (fire-and-forget), and announces the change with a UI blip.
 */
function toggleAudioLayer(layer: keyof AudioLayerState): void {
const nextValue = !audioLayers[layer];
audioLayers = { ...audioLayers, [layer]: nextValue };
persistAudioLayerState();
void applyAudioLayerState();
updateStatus(`${layer} layer ${nextValue ? 'on' : 'off'}.`);
audio.sfxUiBlip();
}
function pushChatMessage(message: string): void {
messageBuffer.push(message);
if (messageBuffer.length > 300) {
@@ -983,6 +1039,7 @@ async function onMessage(message: IncomingMessage): Promise<void> {
}
await radioRuntime.sync(state.items.values());
await itemEmitRuntime.sync(state.items.values());
await applyAudioLayerState();
gameLoop();
break;
@@ -1012,11 +1069,13 @@ async function onMessage(message: IncomingMessage): Promise<void> {
if (peer) {
const movementDelta = Math.hypot(message.x - prevX, message.y - prevY);
const soundUrl = movementDelta > 1.5 ? TELEPORT_SOUND_URL : randomFootstepUrl();
void audio.playSpatialSample(
soundUrl,
{ x: peer.x - state.player.x, y: peer.y - state.player.y },
FOOTSTEP_GAIN,
);
if (audioLayers.world) {
void audio.playSpatialSample(
soundUrl,
{ x: peer.x - state.player.x, y: peer.y - state.player.y },
FOOTSTEP_GAIN,
);
}
}
break;
}
@@ -1120,11 +1179,13 @@ async function onMessage(message: IncomingMessage): Promise<void> {
case 'item_use_sound': {
const soundUrl = resolveIncomingSoundUrl(message.sound);
if (!soundUrl) break;
void audio.playSpatialSample(
soundUrl,
{ x: message.x - state.player.x, y: message.y - state.player.y },
1,
);
if (audioLayers.world) {
void audio.playSpatialSample(
soundUrl,
{ x: message.x - state.player.x, y: message.y - state.player.y },
1,
);
}
break;
}
}
@@ -1173,6 +1234,26 @@ function handleNormalModeInput(code: string, shiftKey: boolean): void {
return;
}
if (code === 'Digit1') {
toggleAudioLayer('voice');
return;
}
if (code === 'Digit2') {
toggleAudioLayer('item');
return;
}
if (code === 'Digit3') {
toggleAudioLayer('media');
return;
}
if (code === 'Digit4') {
toggleAudioLayer('world');
return;
}
if (code === 'KeyE') {
const currentEffect = audio.getCurrentEffect();
const currentIndex = EFFECT_SEQUENCE.findIndex((effect) => effect.id === currentEffect.id);

View File

@@ -4,6 +4,7 @@ import type { RemoteUser } from '../network/protocol';
// A spatial peer plus its WebRTC plumbing. remoteStream retains the last
// received MediaStream so playback can be re-attached after the voice
// layer is toggled back on (see resumeRemoteAudio).
export type PeerRuntime = SpatialPeerRuntime & {
id: string;
pc: RTCPeerConnection;
remoteStream?: MediaStream;
};
type SendSignal = (targetId: string, payload: { sdp?: RTCSessionDescriptionInit; ice?: RTCIceCandidateInit }) => void;
@@ -57,7 +58,12 @@ export class PeerManager {
};
pc.ontrack = async (event) => {
await this.audio.attachRemoteStream(peer, event.streams[0], this.outputDeviceId);
peer.remoteStream = event.streams[0];
if (this.audio.isVoiceLayerEnabled()) {
await this.audio.attachRemoteStream(peer, event.streams[0], this.outputDeviceId);
} else {
this.audio.cleanupPeerAudio(peer);
}
};
if (isInitiator) {
@@ -150,6 +156,19 @@ export class PeerManager {
}
}
suspendRemoteAudio(): void {
for (const peer of this.peers.values()) {
this.audio.cleanupPeerAudio(peer);
}
}
/**
 * Re-attaches playback for every peer whose remote stream has already
 * arrived; peers still waiting on ontrack are skipped.
 */
async resumeRemoteAudio(): Promise<void> {
for (const peer of this.peers.values()) {
const stream = peer.remoteStream;
if (stream) {
await this.audio.attachRemoteStream(peer, stream, this.outputDeviceId);
}
}
}
private tuneOpus(desc: RTCSessionDescriptionInit): RTCSessionDescriptionInit {
if (!desc.sdp) return desc;
const lines = desc.sdp.split('\r\n');