Apply item-specific spatial range to item-use sounds

This commit is contained in:
Jage9
2026-02-27 01:10:32 -05:00
parent 4ed52649f1
commit 47a7aa0a83
7 changed files with 15 additions and 6 deletions

View File

@@ -1,5 +1,5 @@
// Maintainer-controlled web client version. // Maintainer-controlled web client version.
// Format: YYYY.MM.DD Rn (example: 2026.02.20 R2) // Format: YYYY.MM.DD Rn (example: 2026.02.20 R2)
window.CHGRID_WEB_VERSION = "2026.02.25 R269"; window.CHGRID_WEB_VERSION = "2026.02.25 R270";
// Optional display timezone for timestamps. Falls back to America/Detroit if unset/invalid. // Optional display timezone for timestamps. Falls back to America/Detroit if unset/invalid.
window.CHGRID_TIME_ZONE = "America/Detroit"; window.CHGRID_TIME_ZONE = "America/Detroit";

View File

@@ -35,6 +35,7 @@ const ONE_SHOT_ATTACK_SECONDS = 0.02;
type ActiveSpatialSampleRuntime = { type ActiveSpatialSampleRuntime = {
sourceX: number; sourceX: number;
sourceY: number; sourceY: number;
range: number;
baseGain: number; baseGain: number;
gainNode: GainNode; gainNode: GainNode;
pannerNode: StereoPannerNode | null; pannerNode: StereoPannerNode | null;
@@ -361,6 +362,7 @@ export class AudioEngine {
sourcePosition: { x: number; y: number }, sourcePosition: { x: number; y: number },
playerPosition: { x: number; y: number }, playerPosition: { x: number; y: number },
gain = 1, gain = 1,
range = HEARING_RADIUS,
): Promise<void> { ): Promise<void> {
await this.ensureContext(); await this.ensureContext();
const { audioCtx, sfxGainNode } = this; const { audioCtx, sfxGainNode } = this;
@@ -383,6 +385,7 @@ export class AudioEngine {
const runtime: ActiveSpatialSampleRuntime = { const runtime: ActiveSpatialSampleRuntime = {
sourceX: sourcePosition.x, sourceX: sourcePosition.x,
sourceY: sourcePosition.y, sourceY: sourcePosition.y,
range: Math.max(1, range),
baseGain: gain, baseGain: gain,
gainNode, gainNode,
pannerNode, pannerNode,
@@ -412,6 +415,7 @@ export class AudioEngine {
sourcePosition: { x: number; y: number }, sourcePosition: { x: number; y: number },
playerPosition: { x: number; y: number }, playerPosition: { x: number; y: number },
gain = 1, gain = 1,
range = HEARING_RADIUS,
): Promise<void> { ): Promise<void> {
await this.ensureContext(); await this.ensureContext();
const { audioCtx, sfxGainNode } = this; const { audioCtx, sfxGainNode } = this;
@@ -434,6 +438,7 @@ export class AudioEngine {
const runtime: ActiveSpatialSampleRuntime = { const runtime: ActiveSpatialSampleRuntime = {
sourceX: sourcePosition.x, sourceX: sourcePosition.x,
sourceY: sourcePosition.y, sourceY: sourcePosition.y,
range: Math.max(1, range),
baseGain: gain, baseGain: gain,
gainNode, gainNode,
pannerNode, pannerNode,
@@ -619,7 +624,7 @@ export class AudioEngine {
const mix = resolveSpatialMix({ const mix = resolveSpatialMix({
dx: sample.sourceX - playerPosition.x, dx: sample.sourceX - playerPosition.x,
dy: sample.sourceY - playerPosition.y, dy: sample.sourceY - playerPosition.y,
range: HEARING_RADIUS, range: sample.range,
baseGain: sample.baseGain, baseGain: sample.baseGain,
}); });
const gainValue = mix?.gain ?? 0; const gainValue = mix?.gain ?? 0;

View File

@@ -1657,8 +1657,8 @@ const onAppMessage = createOnMessageHandler({
shouldAnnounceItemPropertyEcho: () => Date.now() >= suppressItemPropertyEchoUntilMs, shouldAnnounceItemPropertyEcho: () => Date.now() >= suppressItemPropertyEchoUntilMs,
playLocateToneAt: (x, y) => audio.sfxLocate({ x: x - state.player.x, y: y - state.player.y }), playLocateToneAt: (x, y) => audio.sfxLocate({ x: x - state.player.x, y: y - state.player.y }),
resolveIncomingSoundUrl, resolveIncomingSoundUrl,
playIncomingItemUseSound: (url, x, y) => { playIncomingItemUseSound: (url, x, y, range) => {
void audio.playSpatialSample(url, { x, y }, { x: state.player.x, y: state.player.y }, 1); void audio.playSpatialSample(url, { x, y }, { x: state.player.x, y: state.player.y }, 1, range ?? HEARING_RADIUS);
}, },
playClockAnnouncement: (sounds, x, y) => { playClockAnnouncement: (sounds, x, y) => {
void clockAnnouncer.playSequence(sounds.map(resolveIncomingSoundUrl), x, y); void clockAnnouncer.playSequence(sounds.map(resolveIncomingSoundUrl), x, y);

View File

@@ -68,7 +68,7 @@ type MessageHandlerDeps = {
shouldAnnounceItemPropertyEcho: () => boolean; shouldAnnounceItemPropertyEcho: () => boolean;
playLocateToneAt: (x: number, y: number) => void; playLocateToneAt: (x: number, y: number) => void;
resolveIncomingSoundUrl: (url: string) => string; resolveIncomingSoundUrl: (url: string) => string;
playIncomingItemUseSound: (url: string, x: number, y: number) => void; playIncomingItemUseSound: (url: string, x: number, y: number, range?: number) => void;
playClockAnnouncement: (sounds: string[], x: number, y: number) => void; playClockAnnouncement: (sounds: string[], x: number, y: number) => void;
handleAuthRequired: (message: Extract<IncomingMessage, { type: 'auth_required' }>) => void; handleAuthRequired: (message: Extract<IncomingMessage, { type: 'auth_required' }>) => void;
handleAuthResult: (message: Extract<IncomingMessage, { type: 'auth_result' }>) => Promise<void>; handleAuthResult: (message: Extract<IncomingMessage, { type: 'auth_result' }>) => Promise<void>;
@@ -297,7 +297,7 @@ export function createOnMessageHandler(deps: MessageHandlerDeps): (message: Inco
const soundUrl = deps.resolveIncomingSoundUrl(message.sound); const soundUrl = deps.resolveIncomingSoundUrl(message.sound);
if (!soundUrl) break; if (!soundUrl) break;
if (deps.getAudioLayers().world) { if (deps.getAudioLayers().world) {
deps.playIncomingItemUseSound(soundUrl, message.x, message.y); deps.playIncomingItemUseSound(soundUrl, message.x, message.y, message.range);
} }
break; break;
} }

View File

@@ -213,6 +213,7 @@ export const itemUseSoundSchema = z.object({
sound: z.string(), sound: z.string(),
x: z.number().int(), x: z.number().int(),
y: z.number().int(), y: z.number().int(),
range: z.number().int().positive().optional(),
}); });
export const itemClockAnnounceSchema = z.object({ export const itemClockAnnounceSchema = z.object({

View File

@@ -292,6 +292,7 @@ class ItemUseSoundPacket(BasePacket):
sound: str sound: str
x: int x: int
y: int y: int
range: int | None = None
class ItemClockAnnouncePacket(BasePacket): class ItemClockAnnouncePacket(BasePacket):

View File

@@ -1770,6 +1770,7 @@ class SignalingServer:
use_sound = self._resolve_item_use_sound(item) use_sound = self._resolve_item_use_sound(item)
if use_sound: if use_sound:
sound_x, sound_y = self._get_item_sound_source_position(item) sound_x, sound_y = self._get_item_sound_source_position(item)
sound_range = self._get_item_emit_range(item)
await self._broadcast( await self._broadcast(
ItemUseSoundPacket( ItemUseSoundPacket(
type="item_use_sound", type="item_use_sound",
@@ -1777,6 +1778,7 @@ class SignalingServer:
sound=use_sound, sound=use_sound,
x=sound_x, x=sound_x,
y=sound_y, y=sound_y,
range=sound_range,
) )
) )
if item.type == "clock": if item.type == "clock":