// MP3-based audio engine with iOS-friendly background playback.
// Each track <audio> -> MediaElementSource -> Gain -> mixDest
// (MediaStreamAudioDestinationNode). A single output <audio> plays the
// mix via srcObject so iOS keeps the audio session alive on lock/background.

// Ambient global augmentations: the engine singleton is published on
// window.audioEngine (see bottom of file), and older Safari exposes the
// AudioContext constructor only under the webkit- prefix.
declare global {
  interface Window {
    audioEngine: AudioEngine;
    // Prefixed constructor on legacy Safari; optional because modern
    // browsers only provide the unprefixed window.AudioContext.
    webkitAudioContext?: typeof AudioContext;
  }
}

// Descriptor accepted by register()/play(). Exactly one of `file` or `blob`
// is expected to supply the audio data; if both are set, `blob` wins.
interface SoundLike {
  id: string;                            // unique key into the tracks map
  file?: string;                         // URL of an audio file
  blob?: Blob;                           // in-memory audio data (takes priority over file)
  isCustom?: boolean;                    // marks user-supplied sounds
  loopMode?: 'continuous' | 'interval';  // defaults to 'continuous'
  intervalSec?: number;                  // replay period for 'interval' mode (defaults to 30)
}

// Per-sound runtime state held by AudioEngine. `source` and `gain` remain
// null until the track is first wired into the Web Audio graph (ensureRouting).
interface Track {
  audio: HTMLAudioElement;                     // element that decodes the file/blob
  source: MediaElementAudioSourceNode | null;  // taps the element into the graph
  gain: GainNode | null;                       // per-track volume node
  vol: number;                                 // last requested track volume
  isCustom: boolean;                           // user-supplied sound flag
  loopMode: 'continuous' | 'interval';         // seamless loop vs timed replay
  intervalSec: number;                         // replay period when loopMode === 'interval'
  intervalHandle: number | null;               // setInterval id while in 'interval' mode
  blobUrl: string | null;                      // object URL to revoke on unregister
}

/**
 * Web Audio mixing engine. Each registered sound owns an <audio> element that
 * is routed element -> MediaElementSource -> per-track Gain -> master Gain ->
 * MediaStreamAudioDestinationNode; a single output <audio> plays the mixed
 * stream via srcObject so iOS keeps the audio session alive in background.
 */
class AudioEngine {
  /** Lazily created in init(); null until first use. */
  ctx: AudioContext | null = null;
  /** Master gain applied to the whole mix; tracks masterVol. */
  master: GainNode | null = null;
  /** Captures the mix as a MediaStream for the output element. */
  mixDest: MediaStreamAudioDestinationNode | null = null;
  /** <audio> that actually emits sound (plays mixDest.stream). */
  outputEl: HTMLAudioElement | null = null;
  masterVol: number = 0.7;
  tracks: Map<string, Track> = new Map();

  /** Duration (s) of every gain ramp; a short fade avoids audible clicks. */
  private static readonly RAMP_SEC = 0.05;

  /** Clamp an interval-replay period to the supported 1s..1h range. */
  private static clampInterval(sec: number): number {
    return Math.max(1, Math.min(3600, sec));
  }

  /**
   * Cancel pending automation on `gain` and ramp it to `target` over
   * RAMP_SEC. No-op before init() (no context to take a timestamp from).
   */
  private rampGain(gain: GainNode, target: number): void {
    if (!this.ctx) return;
    const now = this.ctx.currentTime;
    gain.gain.cancelScheduledValues(now);
    gain.gain.setValueAtTime(gain.gain.value, now);
    gain.gain.linearRampToValueAtTime(target, now + AudioEngine.RAMP_SEC);
  }

  /** Build the shared graph (context, master gain, mix output). Idempotent. */
  init(): void {
    if (this.ctx) return;
    const Ctx = (window.AudioContext || window.webkitAudioContext) as typeof AudioContext;
    this.ctx = new Ctx();
    this.master = this.ctx.createGain();
    this.master.gain.value = this.masterVol;
    this.mixDest = this.ctx.createMediaStreamDestination();
    this.master.connect(this.mixDest);
    // Note: master is intentionally NOT connected to ctx.destination so that
    // ALL output goes through outputEl (the audio session pin for iOS bg).
    this.outputEl = new Audio();
    this.outputEl.srcObject = this.mixDest.stream;
    this.outputEl.preload = 'auto';
  }

  /** Start the mix output element if it is not already playing. */
  startOutput(): void {
    if (!this.outputEl) return;
    if (this.outputEl.paused) {
      // Rejection (e.g. autoplay policy) is expected before a user gesture.
      this.outputEl.play().catch(() => {});
    }
  }

  /** Set the master volume; ramps smoothly once the graph exists. */
  setMaster(v: number): void {
    this.masterVol = v;
    if (!this.master) return;
    this.rampGain(this.master, v);
  }

  /**
   * Register a sound (creates the <audio> element). Idempotent — calling
   * twice with same id is a no-op.
   */
  register(sound: SoundLike): void {
    if (this.tracks.has(sound.id)) return;
    const audio = new Audio();
    // Configure fetch-affecting properties BEFORE assigning src so the
    // initial resource fetch is guaranteed to use the CORS mode (the
    // original set crossOrigin after src, which some browsers ignore for
    // the already-started load).
    audio.preload = 'metadata';
    audio.crossOrigin = 'anonymous';
    let blobUrl: string | null = null;
    if (sound.blob) {
      blobUrl = URL.createObjectURL(sound.blob);
      audio.src = blobUrl;
    } else if (sound.file) {
      audio.src = sound.file;
    }
    // Per-track loudness is controlled by the GainNode, not element volume.
    audio.volume = 1;
    // 'interval' mode replays on a timer instead of looping seamlessly.
    audio.loop = (sound.loopMode || 'continuous') !== 'interval';
    this.tracks.set(sound.id, {
      audio,
      source: null,
      gain: null,
      vol: 0.5,
      isCustom: !!sound.isCustom,
      loopMode: sound.loopMode || 'continuous',
      // Clamp here for consistency with setLoopMode().
      intervalSec: AudioEngine.clampInterval(sound.intervalSec || 30),
      intervalHandle: null,
      blobUrl,
    });
  }

  /** Stop a sound, tear down its routing, and forget it. */
  unregister(id: string): void {
    const t = this.tracks.get(id);
    if (!t) return;
    this.stopInternal(id, true);
    if (t.blobUrl) {
      // Release the object URL so the Blob can be garbage collected.
      try { URL.revokeObjectURL(t.blobUrl); } catch {}
    }
    this.tracks.delete(id);
  }

  /**
   * Switch between seamless looping and timed replay. `intervalSec` is
   * clamped to 1..3600. Restarts the replay timer if the sound is playing.
   */
  setLoopMode(id: string, mode: 'continuous' | 'interval', intervalSec?: number): void {
    const t = this.tracks.get(id);
    if (!t) return;
    t.loopMode = mode;
    if (intervalSec != null) t.intervalSec = AudioEngine.clampInterval(intervalSec);
    t.audio.loop = mode !== 'interval';
    if (this.isPlaying(id)) {
      this.stopInterval(id);
      if (mode === 'interval') this.startInterval(id);
    }
  }

  /**
   * Wire the track's element into the graph. Idempotent — a media element
   * may only ever be given one MediaElementSource, so the existing source
   * is reused on subsequent calls.
   */
  private ensureRouting(id: string): void {
    const t = this.tracks.get(id);
    if (!t) return;
    if (t.source) return;
    this.init();
    t.source = this.ctx!.createMediaElementSource(t.audio);
    t.gain = this.ctx!.createGain();
    t.gain.gain.value = 0; // faded in by play()
    t.source.connect(t.gain);
    t.gain.connect(this.master!);
  }

  /**
   * Start a sound. Should be called from within a user gesture handler
   * (the first call) so iOS can authorize audio output.
   */
  async play(sound: SoundLike, vol: number = 0.5): Promise<void> {
    this.init();
    this.startOutput();
    if (!this.tracks.has(sound.id)) this.register(sound);
    const t = this.tracks.get(sound.id)!;
    this.ensureRouting(sound.id);
    if (this.ctx!.state === 'suspended') {
      try { await this.ctx!.resume(); } catch {}
    }
    t.vol = vol;
    if (t.gain) this.rampGain(t.gain, vol);
    try {
      await t.audio.play();
      if (t.loopMode === 'interval') this.startInterval(sound.id);
    } catch (e) {
      // Surface to status via console; app sets status text
      console.warn('audio play failed', sound.id, e);
    }
  }

  /**
   * Set a track's volume. The value is remembered even before the graph is
   * built (the original dropped it when ctx was still null), and ramped on
   * the gain node once routing exists.
   */
  setVolume(id: string, v: number): void {
    const t = this.tracks.get(id);
    if (!t) return;
    t.vol = v;
    if (!t.gain) return;
    this.rampGain(t.gain, v);
  }

  /** Stop a sound but keep its routing so it can resume cheaply. */
  stop(id: string): void { this.stopInternal(id, false); }

  /**
   * Pause a sound, cancel its replay timer, and fade its gain to 0.
   * With `hard`, also disconnect and drop the routing nodes (used by
   * unregister before deleting the track).
   */
  private stopInternal(id: string, hard: boolean): void {
    const t = this.tracks.get(id);
    if (!t) return;
    this.stopInterval(id);
    try { t.audio.pause(); } catch {}
    if (t.gain) this.rampGain(t.gain, 0);
    if (hard && t.source) {
      try { t.source.disconnect(); } catch {}
      try { if (t.gain) t.gain.disconnect(); } catch {}
      t.source = null;
      t.gain = null;
    }
  }

  /** Soft-stop every registered sound. */
  stopAll(): void {
    [...this.tracks.keys()].forEach((id: string) => this.stop(id));
  }

  /** True if the track exists and its element is not paused. */
  isPlaying(id: string): boolean {
    const t = this.tracks.get(id);
    if (!t) return false;
    return !t.audio.paused;
  }

  /** Re-arm the context and output element after backgrounding. */
  resumeIfNeeded(): void {
    if (!this.ctx) return;
    if (this.ctx.state === 'suspended') {
      this.ctx.resume().catch(() => {});
    }
    this.startOutput();
  }

  /** (Re)start the timed-replay timer for an 'interval'-mode track. */
  private startInterval(id: string): void {
    const t = this.tracks.get(id);
    if (!t) return;
    this.stopInterval(id);
    const ms = Math.max(1, t.intervalSec) * 1000;
    t.intervalHandle = window.setInterval(() => {
      // Re-fetch: the track may have been unregistered since scheduling.
      const cur = this.tracks.get(id);
      if (!cur) return;
      try {
        cur.audio.currentTime = 0;
        cur.audio.play().catch(() => {});
      } catch {}
    }, ms);
  }

  /** Cancel the timed-replay timer, if armed. */
  private stopInterval(id: string): void {
    const t = this.tracks.get(id);
    if (!t) return;
    if (t.intervalHandle != null) {
      clearInterval(t.intervalHandle);
      t.intervalHandle = null;
    }
  }
}

// Publish the singleton engine for the rest of the app.
window.audioEngine = new AudioEngine();

// Wake the audio session if the page comes back to foreground while sounds
// are active (iOS sometimes suspends ctx in background).
document.addEventListener('visibilitychange', () => {
  if (document.hidden) return;
  window.audioEngine.resumeIfNeeded();
});
