When you set the MediaItem in audio_service, you don't know the song duration yet because just_audio hasn't had a chance to tell you at this point. The FAQs say to update the MediaItem like this:

modifiedMediaItem = mediaItem.copyWith(duration: duration);
AudioServiceBackground.setMediaItem(modifiedMediaItem);

But it is unclear to me how or where to do this. The example app in the GitHub repo sidesteps this problem by providing precomputed times. (GitHub issue)

How and where do I transfer the duration from just_audio to audio_service so that it can update the listeners?

I found something that works, so I'm adding an answer below.

Solution

Once you have your just_audio AudioPlayer set up, you can listen for changes in the duration stream and then make the updates there:

_player.durationStream.listen((duration) {
  final songIndex = _player.playbackEvent.currentIndex;
  print('current index: $songIndex, duration: $duration');
  final modifiedMediaItem = mediaItem.copyWith(duration: duration);
  _queue[songIndex] = modifiedMediaItem;
  AudioServiceBackground.setMediaItem(_queue[songIndex]);
  AudioServiceBackground.setQueue(_queue);
});

Notes:

- This is inside your audio_service BackgroundAudioTask class.
- When I tried using _player.currentIndex directly I was getting strange behavior (the first two songs both had index 0 before the index started incrementing) (GitHub issue). That's why I use the playback event here to get the current index.
- For my example I'm using a List<MediaItem> for the queue. I didn't actually need to call setQueue in the final line because my UI wasn't listening for changes in the queue, but I suppose it's good to do anyway.

Fuller code example

Here is my whole background_audio_service.dart for reference. It's an adaptation of the documentation example:

import 'dart:async';

import 'package:audio_service/audio_service.dart';
import 'package:audio_session/audio_session.dart';
import 'package:just_audio/just_audio.dart';

void audioPlayerTaskEntrypoint() async {
  AudioServiceBackground.run(() => AudioPlayerTask());
}

class AudioPlayerTask extends BackgroundAudioTask {
  AudioPlayer _player = new AudioPlayer();
  AudioProcessingState _skipState;
  StreamSubscription<PlaybackEvent> _eventSubscription;

  List<MediaItem> _queue = [];
  List<MediaItem> get queue => _queue;
  int get index => _player.playbackEvent.currentIndex;
  MediaItem get mediaItem => index == null ? null : queue[index];
  @override
  Future<void> onStart(Map<String, dynamic> params) async {
    _loadMediaItemsIntoQueue(params);
    await _setAudioSession();
    _propogateEventsFromAudioPlayerToAudioServiceClients();
    _performSpecialProcessingForStateTransistions();
    _loadQueue();
  }

  void _loadMediaItemsIntoQueue(Map<String, dynamic> params) {
    _queue.clear();
    final List mediaItems = params['data'];
    for (var item in mediaItems) {
      final mediaItem = MediaItem.fromJson(item);
      _queue.add(mediaItem);
    }
  }

  Future<void> _setAudioSession() async {
    final session = await AudioSession.instance;
    await session.configure(AudioSessionConfiguration.music());
  }

  void _propogateEventsFromAudioPlayerToAudioServiceClients() {
    _eventSubscription = _player.playbackEventStream.listen((event) {
      _broadcastState();
    });
  }

  void _performSpecialProcessingForStateTransistions() {
    _player.processingStateStream.listen((state) {
      switch (state) {
        case ProcessingState.completed:
          onStop();
          break;
        case ProcessingState.ready:
          _skipState = null;
          break;
        default:
          break;
      }
    });
  }

  Future<void> _loadQueue() async {
    AudioServiceBackground.setQueue(queue);
    try {
      await _player.load(ConcatenatingAudioSource(
        children: queue
            .map((item) => AudioSource.uri(Uri.parse(item.id)))
            .toList(),
      ));
      // Listen for duration changes so each MediaItem can be updated once
      // just_audio knows how long the current song is.
      _player.durationStream.listen((duration) {
        _updateQueueWithCurrentDuration(duration);
      });
      onPlay();
    } catch (e) {
      print('Error: $e');
      onStop();
    }
  }

  // Copies the duration reported by just_audio into the current MediaItem
  // and rebroadcasts it so audio_service clients see the new duration.
  void _updateQueueWithCurrentDuration(Duration duration) {
    final songIndex = _player.playbackEvent.currentIndex;
    print('current index: $songIndex, duration: $duration');
    final modifiedMediaItem = mediaItem.copyWith(duration: duration);
    _queue[songIndex] = modifiedMediaItem;
    AudioServiceBackground.setMediaItem(_queue[songIndex]);
    AudioServiceBackground.setQueue(_queue);
  }

  @override
  Future<void> onSkipToQueueItem(String mediaId) async {
    final newIndex = queue.indexWhere((item) => item.id == mediaId);
    if (newIndex == -1) return;
    _skipState = newIndex > index
        ? AudioProcessingState.skippingToNext
        : AudioProcessingState.skippingToPrevious;
    _player.seek(Duration.zero, index: newIndex);
  }

  @override
  Future<void> onPlay() => _player.play();

  @override
  Future<void> onPause() => _player.pause();

  @override
  Future<void> onSeekTo(Duration position) => _player.seek(position);

  @override
  Future<void> onFastForward() => _seekRelative(fastForwardInterval);

  @override
  Future<void> onRewind() => _seekRelative(-rewindInterval);

  @override
  Future<void> onStop() async {
    await _player.dispose();
    _eventSubscription.cancel();
    await _broadcastState();
    await super.onStop();
  }

  /// Jumps away from the current position by [offset].
  Future<void> _seekRelative(Duration offset) async {
    var newPosition = _player.position + offset;
    if (newPosition < Duration.zero) newPosition = Duration.zero;
    if (newPosition > mediaItem.duration) newPosition = mediaItem.duration;
    await _player.seek(newPosition);
  }

  /// Broadcasts the current state to all clients.
  Future<void> _broadcastState() async {
    await AudioServiceBackground.setState(
      controls: [
        MediaControl.skipToPrevious,
        if (_player.playing) MediaControl.pause else MediaControl.play,
        MediaControl.skipToNext,
      ],
      androidCompactActions: [0, 1, 2],
      processingState: _getProcessingState(),
      playing: _player.playing,
      position: _player.position,
      bufferedPosition: _player.bufferedPosition,
      speed: _player.speed,
    );
  }

  /// Maps just_audio's processing state into audio_service's playing
  /// state. If we are in the middle of a skip, we use [_skipState] instead.
  AudioProcessingState _getProcessingState() {
    if (_skipState != null) return _skipState;
    switch (_player.processingState) {
      case ProcessingState.none:
        return AudioProcessingState.stopped;
      case ProcessingState.loading:
        return AudioProcessingState.connecting;
      case ProcessingState.buffering:
        return AudioProcessingState.buffering;
      case ProcessingState.ready:
        return AudioProcessingState.ready;
      case ProcessingState.completed:
        return AudioProcessingState.completed;
      default:
        throw Exception("Invalid state: ${_player.processingState}");
    }
  }
}

Then in my state management class I got a stream from AudioService like this:

Stream<AudioPlayerState> get mediaStateStream =>
    Rx.combineLatest2<Duration, MediaItem, AudioPlayerState>(
        AudioService.positionStream,
        AudioService.currentMediaItemStream,
        (position, mediaItem) => AudioPlayerState(position, mediaItem.duration));

where AudioPlayerState is

class AudioPlayerState {
  const AudioPlayerState(this.currentTime, this.totalTime);

  final Duration currentTime;
  final Duration totalTime;

  const AudioPlayerState.initial() : this(Duration.zero, Duration.zero);
}

And I used a StreamBuilder in the Flutter UI to listen to mediaStateStream and update my audio player seek bar widget.
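For reference, here is a minimal sketch of what that seek bar wiring might look like. The SeekBar widget name and its constructor are my own assumptions rather than part of the original code; mediaStateStream and AudioPlayerState are the ones defined above, and AudioService.seekTo forwards to the background task's onSeekTo handler:

import 'package:audio_service/audio_service.dart';
import 'package:flutter/material.dart';

// Assumes AudioPlayerState and the mediaStateStream getter from the state
// management class above are visible here (e.g. via an import). SeekBar is a
// hypothetical widget name used only for illustration.
class SeekBar extends StatelessWidget {
  const SeekBar({Key key, this.mediaStateStream}) : super(key: key);

  /// The combined position/duration stream built with Rx.combineLatest2 above.
  final Stream<AudioPlayerState> mediaStateStream;

  @override
  Widget build(BuildContext context) {
    return StreamBuilder<AudioPlayerState>(
      stream: mediaStateStream,
      builder: (context, snapshot) {
        final state = snapshot.data ?? const AudioPlayerState.initial();
        // totalTime may still be null before just_audio has reported a duration.
        final total = (state.totalTime ?? Duration.zero).inMilliseconds;
        final current = state.currentTime.inMilliseconds.clamp(0, total);
        return Slider(
          min: 0,
          max: total.toDouble(),
          value: current.toDouble(),
          // Dragging the slider asks the background task to seek.
          onChanged: (value) =>
              AudioService.seekTo(Duration(milliseconds: value.round())),
        );
      },
    );
  }
}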
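As a related aside, the params['data'] list that onStart reads above has to be supplied when the UI starts the background task. The following is only a rough sketch under assumptions: the startAudioService helper and the songs map keys are hypothetical, and the AudioService.start parameter names should be checked against the audio_service version you are using (this post targets the 0.17-era API):

import 'package:audio_service/audio_service.dart';

// Hypothetical UI-side helper: serialize the queue and hand it to the
// background task via params, where _loadMediaItemsIntoQueue reads 'data'.
Future<void> startAudioService(List<Map<String, String>> songs) async {
  final mediaItems = songs
      .map((song) => MediaItem(
            id: song['url'], // parsed into an AudioSource.uri in _loadQueue
            album: song['album'],
            title: song['title'],
          ).toJson())
      .toList();

  await AudioService.start(
    backgroundTaskEntrypoint: audioPlayerTaskEntrypoint,
    params: {'data': mediaItems},
    androidNotificationChannelName: 'Audio Player',
  );
}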