import 'package:audio_service/audio_service.dart';
import 'package:just_audio/just_audio.dart';

import '../modelos/emisora.dart';

/// Playback state exposed to the UI.
enum EstadoReproduccion { detenido, cargando, reproduciendo, pausado, error }

// ─────────────────────────────────────────────────────────────
// Global handler — initialized in main.dart with AudioService.init
// ─────────────────────────────────────────────────────────────
PluriWaveAudioHandler? _handlerGlobal;

/// Registers the handler. Call from main.dart after AudioService.init.
void registrarHandler(PluriWaveAudioHandler handler) {
  _handlerGlobal = handler;
}

/// High-level wrapper for the UI.
///
/// Delegates ALL playback to [PluriWaveAudioHandler] to guarantee the audio
/// stays alive in the background with a foreground notification.
class ServicioAudio {
  PluriWaveAudioHandler get _handler {
    assert(
        _handlerGlobal != null,
        'ServicioAudio: handler no registrado. '
        'Llama registrarHandler() en main.dart tras AudioService.init.');
    return _handlerGlobal!;
  }

  /// The station currently loaded in the handler, if any.
  Emisora? get emisoraActual => _handler.emisoraActual;

  /// Maps the handler's raw [PlaybackState] stream onto the simplified
  /// [EstadoReproduccion] enum consumed by the UI.
  Stream<EstadoReproduccion> get estadoStream =>
      _handler.playbackState.map((s) {
        // FIX: report error states as such — previously an
        // AudioProcessingState.error fell through to "pausado"/"detenido"
        // and EstadoReproduccion.error was unreachable.
        if (s.processingState == AudioProcessingState.error) {
          return EstadoReproduccion.error;
        }
        if (s.processingState == AudioProcessingState.loading ||
            s.processingState == AudioProcessingState.buffering) {
          return EstadoReproduccion.cargando;
        }
        if (s.playing) return EstadoReproduccion.reproduciendo;
        if (s.processingState == AudioProcessingState.idle) {
          return EstadoReproduccion.detenido;
        }
        return EstadoReproduccion.pausado;
      });

  /// Starts playing [emisora], replacing any currently playing stream.
  ///
  /// Builds the [MediaItem] shown in the system notification and delegates
  /// the actual playback to the handler.
  Future<void> reproducir(Emisora emisora) async {
    // FIX: record the station on the handler so [emisoraActual] reflects it;
    // previously nothing ever assigned handler.emisoraActual, so the getter
    // always returned null.
    _handler.emisoraActual = emisora;
    final item = MediaItem(
      id: emisora.url,
      title: emisora.nombre,
      artist: emisora.pais ?? '',
      album: 'PluriWave',
      artUri: emisora.favicon != null && emisora.favicon!.isNotEmpty
          ? Uri.tryParse(emisora.favicon!)
          : null,
      extras: {'uuid': emisora.uuid},
    );
    await _handler.playMediaItem(item);
  }

  Future<void> pausar() => _handler.pause();

  Future<void> reanudar() => _handler.play();

  /// Toggles between play and pause based on the handler's current state.
  Future<void> togglePlay() async {
    if (_handler.playbackState.value.playing) {
      await pausar();
    } else {
      await reanudar();
    }
  }

  Future<void> detener() => _handler.stop();

  /// Sets the volume; the value is clamped to [0.0, 1.0].
  // .toDouble(): num.clamp statically returns num, which is not assignable
  // to the handler's double parameter under null safety.
  Future<void> setVolumen(double vol) =>
      _handler.setVolume(vol.clamp(0.0, 1.0).toDouble());

  double get volumen => _handler.volumen;

  bool get estaSonando => _handler.playbackState.value.playing;

  /// No-op: the handler is cleaned up in main.dart when the app closes.
  Future<void> dispose() async {}
}

// ─────────────────────────────────────────────────────────────
// AudioHandler — core of background audio
// ─────────────────────────────────────────────────────────────

/// audio_service handler.
///
/// Drives playback with `just_audio` and keeps the foreground notification
/// active while audio is playing.
///
/// ### Initialization in main.dart
/// ```dart
/// final handler = await AudioService.init(
///   builder: () => PluriWaveAudioHandler(),
///   config: const AudioServiceConfig(
///     androidNotificationChannelId: 'es.freetimelab.pluriwave.audio',
///     androidNotificationChannelName: 'PluriWave Radio',
///     androidNotificationOngoing: true,
///     androidStopForegroundOnPause: true,
///     androidNotificationIcon: 'drawable/ic_stat_radio',
///   ),
/// );
/// registrarHandler(handler);
/// ```
class PluriWaveAudioHandler extends BaseAudioHandler with SeekHandler {
  final AudioPlayer _player = AudioPlayer();

  /// Station currently loaded; assigned by [ServicioAudio.reproducir] and
  /// cleared on [stop].
  Emisora? emisoraActual;

  double _volumen = 1.0;
  double get volumen => _volumen;

  PluriWaveAudioHandler() {
    _setupStreams();
  }

  void _setupStreams() {
    // Propagate player state → playbackState (what the notification shows).
    _player.playerStateStream.listen((state) {
      final playing = state.playing;
      playbackState.add(playbackState.value.copyWith(
        controls: [
          if (playing) MediaControl.pause else MediaControl.play,
          MediaControl.stop,
        ],
        systemActions: const {MediaAction.seek, MediaAction.stop},
        androidCompactActionIndices: const [0],
        processingState: _mapProcState(state.processingState),
        playing: playing,
        bufferedPosition: _player.bufferedPosition,
        speed: _player.speed,
      ));
    });

    // Keep bufferedPosition fresh between player-state events.
    _player.bufferedPositionStream.listen((pos) {
      playbackState.add(playbackState.value.copyWith(bufferedPosition: pos));
    });
  }

  /// Translates just_audio's processing state into audio_service's.
  AudioProcessingState _mapProcState(ProcessingState state) {
    return switch (state) {
      ProcessingState.idle => AudioProcessingState.idle,
      ProcessingState.loading => AudioProcessingState.loading,
      ProcessingState.buffering => AudioProcessingState.buffering,
      ProcessingState.ready => AudioProcessingState.ready,
      ProcessingState.completed => AudioProcessingState.completed,
    };
  }

  /// Loads and plays [item] (its [MediaItem.id] is the stream URL).
  ///
  /// On [PlayerException] publishes an error playback state for the UI and
  /// rethrows so callers can react as well.
  @override
  Future<void> playMediaItem(MediaItem item) async {
    mediaItem.add(item);
    try {
      await _player.stop();
      await _player.setUrl(item.id);
      await _player.play();
    } on PlayerException catch (e) {
      playbackState.add(playbackState.value.copyWith(
        processingState: AudioProcessingState.error,
        errorMessage: e.message ?? 'Error de reproducción',
        errorCode: e.code,
      ));
      rethrow;
    }
  }

  @override
  Future<void> play() => _player.play();

  @override
  Future<void> pause() => _player.pause();

  @override
  Future<void> stop() async {
    await _player.stop();
    emisoraActual = null;
    mediaItem.add(null);
    await super.stop();
  }

  @override
  Future<void> seek(Duration position) => _player.seek(position);

  /// Sets the player volume; the value is clamped to [0.0, 1.0].
  Future<void> setVolume(double vol) async {
    // .toDouble(): num.clamp statically returns num, not double.
    _volumen = vol.clamp(0.0, 1.0).toDouble();
    await _player.setVolume(_volumen);
  }

  /// Android swipe-away: release everything so no orphan player keeps
  /// running without its notification.
  @override
  Future<void> onTaskRemoved() async {
    await stop();
    await _player.dispose();
  }
}