Add audio layer toggles and reduce item emit volume

This commit is contained in:
Jage9
2026-02-21 16:30:31 -05:00
parent 3a64f7d38c
commit e0fc98d3f1
6 changed files with 169 additions and 16 deletions

View File

@@ -1,5 +1,5 @@
// Maintainer-controlled web client version. // Maintainer-controlled web client version.
// Format: YYYY.MM.DD Rn (example: 2026.02.20 R2) // Format: YYYY.MM.DD Rn (example: 2026.02.20 R2)
window.CHGRID_WEB_VERSION = "2026.02.21 R101"; window.CHGRID_WEB_VERSION = "2026.02.21 R102";
// Optional display timezone for timestamps. Falls back to America/Detroit if unset/invalid. // Optional display timezone for timestamps. Falls back to America/Detroit if unset/invalid.
window.CHGRID_TIME_ZONE = "America/Detroit"; window.CHGRID_TIME_ZONE = "America/Detroit";

View File

@@ -41,6 +41,7 @@ export class AudioEngine {
private loopbackEnabled = false; private loopbackEnabled = false;
private loopbackRuntime: EffectRuntime | null = null; private loopbackRuntime: EffectRuntime | null = null;
private outputMode: OutputMode = 'stereo'; private outputMode: OutputMode = 'stereo';
private voiceLayerEnabled = true;
private effectIndex = EFFECT_SEQUENCE.findIndex((effect) => effect.id === 'off'); private effectIndex = EFFECT_SEQUENCE.findIndex((effect) => effect.id === 'off');
private readonly effectValues: Record<EffectId, number> = { private readonly effectValues: Record<EffectId, number> = {
reverb: 50, reverb: 50,
@@ -173,6 +174,14 @@ export class AudioEngine {
return this.outputMode; return this.outputMode;
} }
// Sets the voice-layer flag only; it does not rewire already-attached peers.
// Callers detach/reattach peer audio themselves (see PeerManager's
// suspendRemoteAudio/resumeRemoteAudio).
setVoiceLayerEnabled(enabled: boolean): void {
  this.voiceLayerEnabled = enabled;
}

// Whether remote voice streams should currently be routed to the output.
isVoiceLayerEnabled(): boolean {
  return this.voiceLayerEnabled;
}
toggleLoopback(): boolean { toggleLoopback(): boolean {
this.loopbackEnabled = !this.loopbackEnabled; this.loopbackEnabled = !this.loopbackEnabled;
this.rebuildOutboundEffectGraph(); this.rebuildOutboundEffectGraph();
@@ -186,6 +195,7 @@ export class AudioEngine {
): Promise<void> { ): Promise<void> {
await this.ensureContext(); await this.ensureContext();
if (!this.audioCtx) return; if (!this.audioCtx) return;
this.cleanupPeerAudio(peer);
const audioElement = new Audio(); const audioElement = new Audio();
audioElement.srcObject = stream; audioElement.srcObject = stream;
@@ -206,10 +216,14 @@ export class AudioEngine {
let pannerNode: StereoPannerNode | undefined; let pannerNode: StereoPannerNode | undefined;
if (this.supportsStereoPanner()) { if (this.supportsStereoPanner()) {
pannerNode = this.audioCtx.createStereoPanner(); pannerNode = this.audioCtx.createStereoPanner();
if (this.voiceLayerEnabled) {
gainNode.connect(pannerNode).connect(this.audioCtx.destination); gainNode.connect(pannerNode).connect(this.audioCtx.destination);
}
} else { } else {
if (this.voiceLayerEnabled) {
gainNode.connect(this.audioCtx.destination); gainNode.connect(this.audioCtx.destination);
} }
}
peer.audioElement = audioElement; peer.audioElement = audioElement;
peer.gain = gainNode; peer.gain = gainNode;
@@ -313,9 +327,16 @@ export class AudioEngine {
} }
cleanupPeerAudio(peer: SpatialPeerRuntime): void { cleanupPeerAudio(peer: SpatialPeerRuntime): void {
peer.audioElement?.remove(); if (peer.audioElement) {
peer.audioElement.pause();
peer.audioElement.srcObject = null;
peer.audioElement.remove();
}
peer.gain?.disconnect(); peer.gain?.disconnect();
peer.panner?.disconnect(); peer.panner?.disconnect();
peer.audioElement = undefined;
peer.gain = undefined;
peer.panner = undefined;
} }
private rebuildOutboundEffectGraph(): void { private rebuildOutboundEffectGraph(): void {

View File

@@ -9,8 +9,11 @@ type EmitOutput = {
panner: StereoPannerNode | null; panner: StereoPannerNode | null;
}; };
const ITEM_EMIT_BASE_GAIN = 0.3;
export class ItemEmitRuntime { export class ItemEmitRuntime {
private readonly outputs = new Map<string, EmitOutput>(); private readonly outputs = new Map<string, EmitOutput>();
private layerEnabled = true;
constructor( constructor(
private readonly audio: AudioEngine, private readonly audio: AudioEngine,
@@ -34,7 +37,20 @@ export class ItemEmitRuntime {
} }
} }
/**
 * Turns the item-emit audio layer on or off.
 * Disabling tears down every active emit output; enabling re-syncs
 * outputs against the provided world items.
 */
async setLayerEnabled(enabled: boolean, items: Iterable<WorldItem>): Promise<void> {
  this.layerEnabled = enabled;
  if (enabled) {
    await this.sync(items);
  } else {
    this.cleanupAll();
  }
}
async sync(items: Iterable<WorldItem>): Promise<void> { async sync(items: Iterable<WorldItem>): Promise<void> {
if (!this.layerEnabled) {
this.cleanupAll();
return;
}
const validIds = new Set<string>(); const validIds = new Set<string>();
await this.audio.ensureContext(); await this.audio.ensureContext();
const audioCtx = this.audio.context; const audioCtx = this.audio.context;
@@ -81,6 +97,7 @@ export class ItemEmitRuntime {
} }
updateSpatialAudio(items: Map<string, WorldItem>, playerPosition: { x: number; y: number }): void { updateSpatialAudio(items: Map<string, WorldItem>, playerPosition: { x: number; y: number }): void {
if (!this.layerEnabled) return;
const audioCtx = this.audio.context; const audioCtx = this.audio.context;
if (!audioCtx) return; if (!audioCtx) return;
@@ -101,7 +118,7 @@ export class ItemEmitRuntime {
gainValue = 1; gainValue = 1;
panValue = 0; panValue = 0;
} }
output.gain.gain.linearRampToValueAtTime(gainValue, audioCtx.currentTime + 0.1); output.gain.gain.linearRampToValueAtTime(gainValue * ITEM_EMIT_BASE_GAIN, audioCtx.currentTime + 0.1);
if (output.panner) { if (output.panner) {
const resolvedPan = this.audio.getOutputMode() === 'mono' ? 0 : Math.max(-1, Math.min(1, panValue)); const resolvedPan = this.audio.getOutputMode() === 'mono' ? 0 : Math.max(-1, Math.min(1, panValue));
output.panner.pan.linearRampToValueAtTime(resolvedPan, audioCtx.currentTime + 0.1); output.panner.pan.linearRampToValueAtTime(resolvedPan, audioCtx.currentTime + 0.1);

View File

@@ -110,6 +110,7 @@ function connectRadioChannelSource(
export class RadioStationRuntime { export class RadioStationRuntime {
private readonly sharedRadioSources = new Map<string, SharedRadioSource>(); private readonly sharedRadioSources = new Map<string, SharedRadioSource>();
private readonly itemRadioOutputs = new Map<string, ItemRadioOutput>(); private readonly itemRadioOutputs = new Map<string, ItemRadioOutput>();
private layerEnabled = true;
constructor(private readonly audio: AudioEngine) {} constructor(private readonly audio: AudioEngine) {}
@@ -148,7 +149,20 @@ export class RadioStationRuntime {
} }
} }
/**
 * Turns the media (radio-station) audio layer on or off.
 * Disabling tears down all shared radio sources/outputs; enabling
 * re-syncs them against the provided world items.
 */
async setLayerEnabled(enabled: boolean, items: Iterable<WorldItem>): Promise<void> {
  this.layerEnabled = enabled;
  if (enabled) {
    await this.sync(items);
  } else {
    this.cleanupAll();
  }
}
async sync(items: Iterable<WorldItem>): Promise<void> { async sync(items: Iterable<WorldItem>): Promise<void> {
if (!this.layerEnabled) {
this.cleanupAll();
return;
}
const validIds = new Set<string>(); const validIds = new Set<string>();
for (const item of items) { for (const item of items) {
if (item.type !== 'radio_station') continue; if (item.type !== 'radio_station') continue;
@@ -163,6 +177,7 @@ export class RadioStationRuntime {
} }
updateSpatialAudio(items: Map<string, WorldItem>, playerPosition: { x: number; y: number }): void { updateSpatialAudio(items: Map<string, WorldItem>, playerPosition: { x: number; y: number }): void {
if (!this.layerEnabled) return;
const audioCtx = this.audio.context; const audioCtx = this.audio.context;
if (!audioCtx) return; if (!audioCtx) return;
for (const [itemId, output] of this.itemRadioOutputs.entries()) { for (const [itemId, output] of this.itemRadioOutputs.entries()) {

View File

@@ -40,6 +40,7 @@ const AUDIO_OUTPUT_STORAGE_KEY = 'chatGridAudioOutputDeviceId';
const AUDIO_INPUT_NAME_STORAGE_KEY = 'chatGridAudioInputDeviceName'; const AUDIO_INPUT_NAME_STORAGE_KEY = 'chatGridAudioInputDeviceName';
const AUDIO_OUTPUT_NAME_STORAGE_KEY = 'chatGridAudioOutputDeviceName'; const AUDIO_OUTPUT_NAME_STORAGE_KEY = 'chatGridAudioOutputDeviceName';
const AUDIO_OUTPUT_MODE_STORAGE_KEY = 'chatGridAudioOutputMode'; const AUDIO_OUTPUT_MODE_STORAGE_KEY = 'chatGridAudioOutputMode';
const AUDIO_LAYER_STATE_STORAGE_KEY = 'chatGridAudioLayers';
const DEFAULT_DISPLAY_TIME_ZONE = 'America/Detroit'; const DEFAULT_DISPLAY_TIME_ZONE = 'America/Detroit';
const NICKNAME_STORAGE_KEY = 'spatialChatNickname'; const NICKNAME_STORAGE_KEY = 'spatialChatNickname';
const NICKNAME_MAX_LENGTH = 32; const NICKNAME_MAX_LENGTH = 32;
@@ -104,6 +105,13 @@ type ChangelogData = {
sections: ChangelogSection[]; sections: ChangelogSection[];
}; };
// Persisted on/off state for each client audio layer.
type AudioLayerState = {
  voice: boolean; // remote peer voice streams (AudioEngine/PeerManager)
  item: boolean; // item emit loops (ItemEmitRuntime)
  media: boolean; // radio stations (RadioStationRuntime)
  world: boolean; // one-shot world SFX: footsteps and item-use sounds
};
const APP_VERSION = String(window.CHGRID_WEB_VERSION ?? '').trim(); const APP_VERSION = String(window.CHGRID_WEB_VERSION ?? '').trim();
const DISPLAY_TIME_ZONE = resolveDisplayTimeZone(); const DISPLAY_TIME_ZONE = resolveDisplayTimeZone();
const CLOCK_TIME_ZONE_OPTIONS = [ const CLOCK_TIME_ZONE_OPTIONS = [
@@ -214,6 +222,12 @@ const itemEmitRuntime = new ItemEmitRuntime(audio, resolveIncomingSoundUrl);
let internalClipboardText = ''; let internalClipboardText = '';
let replaceTextOnNextType = false; let replaceTextOnNextType = false;
let pendingEscapeDisconnect = false; let pendingEscapeDisconnect = false;
// All layers default to audible; loadAudioLayerState() overrides from localStorage.
let audioLayers: AudioLayerState = {
  voice: true,
  item: true,
  media: true,
  world: true,
};
const signalingProtocol = window.location.protocol === 'https:' ? 'wss' : 'ws'; const signalingProtocol = window.location.protocol === 'https:' ? 'wss' : 'ws';
const signalingUrl = `${signalingProtocol}://${window.location.host}/ws`; const signalingUrl = `${signalingProtocol}://${window.location.host}/ws`;
@@ -230,6 +244,7 @@ const peerManager = new PeerManager(
audio.setOutputMode(outputMode); audio.setOutputMode(outputMode);
loadEffectLevels(); loadEffectLevels();
loadAudioLayerState();
void loadChangelog(); void loadChangelog();
function requiredById<T extends HTMLElement>(id: string): T { function requiredById<T extends HTMLElement>(id: string): T {
@@ -378,6 +393,47 @@ function persistEffectLevels(): void {
localStorage.setItem(EFFECT_LEVELS_STORAGE_KEY, JSON.stringify(audio.getEffectLevels())); localStorage.setItem(EFFECT_LEVELS_STORAGE_KEY, JSON.stringify(audio.getEffectLevels()));
} }
/**
 * Restores the per-layer audio toggles from localStorage.
 * A layer counts as enabled unless it was explicitly persisted as false,
 * so missing keys and fresh clients default to "on". Also pushes the
 * restored voice setting into the audio engine.
 */
function loadAudioLayerState(): void {
  const stored = localStorage.getItem(AUDIO_LAYER_STATE_STORAGE_KEY);
  if (stored) {
    try {
      const saved = JSON.parse(stored) as Partial<AudioLayerState>;
      const on = (flag: boolean | undefined): boolean => flag !== false;
      audioLayers = {
        voice: on(saved.voice),
        item: on(saved.item),
        media: on(saved.media),
        world: on(saved.world),
      };
    } catch {
      // Malformed persisted JSON: keep the defaults.
    }
  }
  audio.setVoiceLayerEnabled(audioLayers.voice);
}
// Persists the current layer toggles as JSON under AUDIO_LAYER_STATE_STORAGE_KEY.
function persistAudioLayerState(): void {
  localStorage.setItem(AUDIO_LAYER_STATE_STORAGE_KEY, JSON.stringify(audioLayers));
}
/**
 * Pushes the current `audioLayers` toggles into each audio subsystem:
 * voice gates the engine flag and attaches/detaches remote peer streams;
 * media and item delegate to the radio and item-emit runtimes.
 * The `world` layer is not handled here — it is checked at each one-shot
 * playback call site instead.
 */
async function applyAudioLayerState(): Promise<void> {
  const { voice, media, item } = audioLayers;
  audio.setVoiceLayerEnabled(voice);
  if (!voice) {
    peerManager.suspendRemoteAudio();
  } else {
    await peerManager.resumeRemoteAudio();
  }
  await radioRuntime.setLayerEnabled(media, state.items.values());
  await itemEmitRuntime.setLayerEnabled(item, state.items.values());
}
/**
 * Flips one audio layer, persists the new state, applies it to the audio
 * subsystems, and announces the result via status text and a UI blip.
 */
function toggleAudioLayer(layer: keyof AudioLayerState): void {
  const next = !audioLayers[layer];
  audioLayers = { ...audioLayers, [layer]: next };
  persistAudioLayerState();
  void applyAudioLayerState();
  updateStatus(`${layer} layer ${next ? 'on' : 'off'}.`);
  audio.sfxUiBlip();
}
function pushChatMessage(message: string): void { function pushChatMessage(message: string): void {
messageBuffer.push(message); messageBuffer.push(message);
if (messageBuffer.length > 300) { if (messageBuffer.length > 300) {
@@ -983,6 +1039,7 @@ async function onMessage(message: IncomingMessage): Promise<void> {
} }
await radioRuntime.sync(state.items.values()); await radioRuntime.sync(state.items.values());
await itemEmitRuntime.sync(state.items.values()); await itemEmitRuntime.sync(state.items.values());
await applyAudioLayerState();
gameLoop(); gameLoop();
break; break;
@@ -1012,12 +1069,14 @@ async function onMessage(message: IncomingMessage): Promise<void> {
if (peer) { if (peer) {
const movementDelta = Math.hypot(message.x - prevX, message.y - prevY); const movementDelta = Math.hypot(message.x - prevX, message.y - prevY);
const soundUrl = movementDelta > 1.5 ? TELEPORT_SOUND_URL : randomFootstepUrl(); const soundUrl = movementDelta > 1.5 ? TELEPORT_SOUND_URL : randomFootstepUrl();
if (audioLayers.world) {
void audio.playSpatialSample( void audio.playSpatialSample(
soundUrl, soundUrl,
{ x: peer.x - state.player.x, y: peer.y - state.player.y }, { x: peer.x - state.player.x, y: peer.y - state.player.y },
FOOTSTEP_GAIN, FOOTSTEP_GAIN,
); );
} }
}
break; break;
} }
@@ -1120,11 +1179,13 @@ async function onMessage(message: IncomingMessage): Promise<void> {
case 'item_use_sound': { case 'item_use_sound': {
const soundUrl = resolveIncomingSoundUrl(message.sound); const soundUrl = resolveIncomingSoundUrl(message.sound);
if (!soundUrl) break; if (!soundUrl) break;
if (audioLayers.world) {
void audio.playSpatialSample( void audio.playSpatialSample(
soundUrl, soundUrl,
{ x: message.x - state.player.x, y: message.y - state.player.y }, { x: message.x - state.player.x, y: message.y - state.player.y },
1, 1,
); );
}
break; break;
} }
} }
@@ -1173,6 +1234,26 @@ function handleNormalModeInput(code: string, shiftKey: boolean): void {
return; return;
} }
if (code === 'Digit1') {
toggleAudioLayer('voice');
return;
}
if (code === 'Digit2') {
toggleAudioLayer('item');
return;
}
if (code === 'Digit3') {
toggleAudioLayer('media');
return;
}
if (code === 'Digit4') {
toggleAudioLayer('world');
return;
}
if (code === 'KeyE') { if (code === 'KeyE') {
const currentEffect = audio.getCurrentEffect(); const currentEffect = audio.getCurrentEffect();
const currentIndex = EFFECT_SEQUENCE.findIndex((effect) => effect.id === currentEffect.id); const currentIndex = EFFECT_SEQUENCE.findIndex((effect) => effect.id === currentEffect.id);

View File

@@ -4,6 +4,7 @@ import type { RemoteUser } from '../network/protocol';
export type PeerRuntime = SpatialPeerRuntime & { export type PeerRuntime = SpatialPeerRuntime & {
id: string; id: string;
pc: RTCPeerConnection; pc: RTCPeerConnection;
remoteStream?: MediaStream;
}; };
type SendSignal = (targetId: string, payload: { sdp?: RTCSessionDescriptionInit; ice?: RTCIceCandidateInit }) => void; type SendSignal = (targetId: string, payload: { sdp?: RTCSessionDescriptionInit; ice?: RTCIceCandidateInit }) => void;
@@ -57,7 +58,12 @@ export class PeerManager {
}; };
pc.ontrack = async (event) => { pc.ontrack = async (event) => {
peer.remoteStream = event.streams[0];
if (this.audio.isVoiceLayerEnabled()) {
await this.audio.attachRemoteStream(peer, event.streams[0], this.outputDeviceId); await this.audio.attachRemoteStream(peer, event.streams[0], this.outputDeviceId);
} else {
this.audio.cleanupPeerAudio(peer);
}
}; };
if (isInitiator) { if (isInitiator) {
@@ -150,6 +156,19 @@ export class PeerManager {
} }
} }
suspendRemoteAudio(): void {
for (const peer of this.peers.values()) {
this.audio.cleanupPeerAudio(peer);
}
}
/**
 * Re-attaches the last-known remote stream for each peer (voice layer on).
 * Peers that have not yet produced a track are skipped; attachment runs
 * sequentially, one peer at a time.
 */
async resumeRemoteAudio(): Promise<void> {
  for (const peer of this.peers.values()) {
    const stream = peer.remoteStream;
    if (stream) {
      await this.audio.attachRemoteStream(peer, stream, this.outputDeviceId);
    }
  }
}
private tuneOpus(desc: RTCSessionDescriptionInit): RTCSessionDescriptionInit { private tuneOpus(desc: RTCSessionDescriptionInit): RTCSessionDescriptionInit {
if (!desc.sdp) return desc; if (!desc.sdp) return desc;
const lines = desc.sdp.split('\r\n'); const lines = desc.sdp.split('\r\n');