Reputation: 925
I am trying to develop an app that presents videos to the user. I am using VideoPlayerController for loading the videos and ChewieController for the UI.
It works great, but when the user closes the app, the video stops. I would like the video to keep playing its audio even when the app is closed or the device is locked.
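For reference, the controllers are wired up roughly like this (simplified; the URL is a placeholder):
import 'package:chewie/chewie.dart';
import 'package:video_player/video_player.dart';

Future<ChewieController> _setUpPlayer() async {
  final videoPlayerController = VideoPlayerController.network(
    'https://example.com/video.mp4', // placeholder
  );
  await videoPlayerController.initialize();
  // Chewie wraps the video controller and provides the playback UI.
  return ChewieController(
    videoPlayerController: videoPlayerController,
    autoPlay: true,
  );
}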
I couldn't find anything about this in the VideoPlayerController and ChewieController documentation.
Is this functionality possible in Flutter and Dart?
Thank you!
Upvotes: 8
Views: 4493
Reputation: 393
Here is an update to @Luccas Azamor's answer. I found a way to change the AudioProcessingState to idle and remove the player from the background notification after the stop button has been pressed:
AudioProcessingState _processingState() {
  if (videoPlayerController == null) return AudioProcessingState.idle;
  if (videoPlayerController.value.isInitialized) {
    // Report "ready" only while the video is playing; once stopped,
    // fall back to idle so the player is removed from the background
    // notification.
    if (videoPlayerController.value.isPlaying) {
      return AudioProcessingState.ready;
    } else {
      return AudioProcessingState.idle;
    }
  }
  return AudioProcessingState.idle;
}
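To actually dismiss the notification when stop is pressed, a stop() override on the handler can push that idle state through the same stream that feeds playbackState (a sketch, assuming the AudioPlayerHandler from @Luccas Azamor's answer and audio_service 0.18):
@override
Future<void> stop() async {
  _videoStop!();
  // Broadcast an idle, not-playing state so the system notification
  // can be removed. Adding via streamController avoids conflicting
  // with the addStream() call that already feeds playbackState.
  streamController.add(PlaybackState(
    processingState: AudioProcessingState.idle,
    playing: false,
  ));
  await super.stop();
}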
Upvotes: 0
Reputation: 51
As the video_player package now has the allowBackgroundPlayback option, I created this simple example showing how to integrate video_player and audio_service.
// This example demonstrates a simple video_player integration.
import 'dart:async';

import 'package:audio_service/audio_service.dart';
import 'package:flutter/material.dart';
import 'package:video_player/video_player.dart';

// You might want to provide this using dependency injection rather than a
// global variable.
late AudioPlayerHandler _audioHandler;

Future<void> main() async {
  _audioHandler = await AudioService.init(
    builder: () => AudioPlayerHandler(),
    config: const AudioServiceConfig(
      androidNotificationChannelId: 'com.ryanheise.myapp.channel.audio',
      androidNotificationChannelName: 'Audio playback',
      androidNotificationOngoing: true,
    ),
  );
  runApp(const MyApp());
}
class MyApp extends StatelessWidget {
  const MyApp({Key? key}) : super(key: key);

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      title: 'Audio Service Demo',
      theme: ThemeData(primarySwatch: Colors.blue),
      home: const MainScreen(),
    );
  }
}

class MainScreen extends StatefulWidget {
  const MainScreen({Key? key}) : super(key: key);

  @override
  _MainScreenState createState() => _MainScreenState();
}
class _MainScreenState extends State<MainScreen> {
  late VideoPlayerController _controller;

  @override
  void initState() {
    super.initState();
    _controller = VideoPlayerController.network(
        'https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4',
        videoPlayerOptions: VideoPlayerOptions(allowBackgroundPlayback: true))
      ..initialize().then((_) {
        _audioHandler.setVideoFunctions(
            _controller.play, _controller.pause, _controller.seekTo, () {
          _controller.seekTo(Duration.zero);
          _controller.pause();
        });

        // So that our clients (the Flutter UI and the system notification) know
        // what state to display, here we set up our audio handler to broadcast all
        // playback state changes as they happen via playbackState...
        _audioHandler.initializeStreamController(_controller);
        _audioHandler.playbackState
            .addStream(_audioHandler.streamController.stream);

        // Ensure the first frame is shown after the video is initialized,
        // even before the play button has been pressed.
        setState(() {});
      });
  }

  @override
  void dispose() {
    // Close the stream
    _audioHandler.streamController.close();
    super.dispose();
  }
  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: const Text('Audio Service Demo'),
      ),
      body: Center(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            Center(
              child: _controller.value.isInitialized
                  ? AspectRatio(
                      aspectRatio: _controller.value.aspectRatio,
                      child: VideoPlayer(_controller),
                    )
                  : Container(),
            ),
            // Play/pause/stop buttons.
            StreamBuilder<bool>(
              stream: _audioHandler.playbackState
                  .map((state) => state.playing)
                  .distinct(),
              builder: (context, snapshot) {
                final playing = snapshot.data ?? false;
                return Row(
                  mainAxisAlignment: MainAxisAlignment.center,
                  children: [
                    _button(Icons.fast_rewind, _audioHandler.rewind),
                    if (playing)
                      _button(Icons.pause, _audioHandler.pause)
                    else
                      _button(Icons.play_arrow, _audioHandler.play),
                    _button(Icons.stop, _audioHandler.stop),
                    _button(Icons.fast_forward, _audioHandler.fastForward),
                  ],
                );
              },
            ),
            // Display the processing state.
            StreamBuilder<AudioProcessingState>(
              stream: _audioHandler.playbackState
                  .map((state) => state.processingState)
                  .distinct(),
              builder: (context, snapshot) {
                final processingState =
                    snapshot.data ?? AudioProcessingState.idle;
                return Text("Processing state: $processingState");
              },
            ),
          ],
        ),
      ),
    );
  }

  IconButton _button(IconData iconData, VoidCallback onPressed) => IconButton(
        icon: Icon(iconData),
        iconSize: 64.0,
        onPressed: onPressed,
      );
}
class MediaState {
  final MediaItem? mediaItem;
  final Duration position;

  MediaState(this.mediaItem, this.position);
}
/// An [AudioHandler] for playing a single item.
class AudioPlayerHandler extends BaseAudioHandler with SeekHandler {
  late StreamController<PlaybackState> streamController;

  static final _item = MediaItem(
    id: 'https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3',
    album: "Science Friday",
    title: "A Salute To Head-Scratching Science",
    artist: "Science Friday and WNYC Studios",
    duration: const Duration(milliseconds: 5739820),
    artUri: Uri.parse(
        'https://media.wnyc.org/i/1400/1400/l/80/1/ScienceFriday_WNYCStudios_1400.jpg'),
  );

  Function? _videoPlay;
  Function? _videoPause;
  Function? _videoSeek;
  Function? _videoStop;

  void setVideoFunctions(
      Function play, Function pause, Function seek, Function stop) {
    _videoPlay = play;
    _videoPause = pause;
    _videoSeek = seek;
    _videoStop = stop;
    mediaItem.add(_item);
  }

  /// Initialise our audio handler.
  AudioPlayerHandler();

  // In this simple example, we handle only 4 actions: play, pause, seek and
  // stop. Any button press from the Flutter UI, notification, lock screen or
  // headset will be routed through to these 4 methods so that you can handle
  // your audio playback logic in one place.
  @override
  Future<void> play() async => _videoPlay!();

  @override
  Future<void> pause() async => _videoPause!();

  @override
  Future<void> seek(Duration position) async => _videoSeek!(position);

  @override
  Future<void> stop() async => _videoStop!();
  void initializeStreamController(
      VideoPlayerController? videoPlayerController) {
    bool _isPlaying() => videoPlayerController?.value.isPlaying ?? false;

    AudioProcessingState _processingState() {
      if (videoPlayerController == null) return AudioProcessingState.idle;
      if (videoPlayerController.value.isInitialized) {
        return AudioProcessingState.ready;
      }
      return AudioProcessingState.idle;
    }

    Duration _bufferedPosition() {
      final value = videoPlayerController?.value;
      if (value == null) return Duration.zero;
      final position = value.position;
      // firstWhere throws if no range matches, so cast to a nullable
      // element type and provide an orElse that returns null instead.
      final DurationRange? currentBufferedRange =
          value.buffered.cast<DurationRange?>().firstWhere(
        (durationRange) =>
            durationRange!.start < position && durationRange.end > position,
        orElse: () => null,
      );
      if (currentBufferedRange == null) return Duration.zero;
      return currentBufferedRange.end;
    }

    void _addVideoEvent() {
      streamController.add(PlaybackState(
        controls: [
          MediaControl.rewind,
          if (_isPlaying()) MediaControl.pause else MediaControl.play,
          MediaControl.stop,
          MediaControl.fastForward,
        ],
        systemActions: const {
          MediaAction.seek,
          MediaAction.seekForward,
          MediaAction.seekBackward,
        },
        androidCompactActionIndices: const [0, 1, 3],
        processingState: _processingState(),
        playing: _isPlaying(),
        updatePosition: videoPlayerController?.value.position ?? Duration.zero,
        bufferedPosition: _bufferedPosition(),
        speed: videoPlayerController?.value.playbackSpeed ?? 1.0,
      ));
    }

    void startStream() {
      videoPlayerController?.addListener(_addVideoEvent);
    }

    void stopStream() {
      videoPlayerController?.removeListener(_addVideoEvent);
      streamController.close();
    }

    streamController = StreamController<PlaybackState>(
        onListen: startStream,
        onPause: stopStream,
        onResume: startStream,
        onCancel: stopStream);
  }
}
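Note that audio_service also needs its platform setup before background playback will work: the AudioService service and media button receiver entries in AndroidManifest.xml, and the audio entry under UIBackgroundModes in Info.plist on iOS. See the audio_service README for the exact snippets.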
Upvotes: 4
Reputation: 61
I've been using the better_player package. It's quite good; it builds on video_player and chewie, and it also has support for player notifications and PiP.
And don't forget to enable the background audio capability in Xcode.
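For reference, a minimal sketch of a network video with a player notification enabled (assuming better_player's API; the URL and titles are placeholders):
import 'package:better_player/better_player.dart';

final controller = BetterPlayerController(
  BetterPlayerConfiguration(
    autoPlay: true,
    // Don't pause automatically when the app goes to the background.
    handleLifecycle: false,
  ),
  betterPlayerDataSource: BetterPlayerDataSource(
    BetterPlayerDataSourceType.network,
    'https://example.com/video.mp4', // placeholder
    notificationConfiguration: BetterPlayerNotificationConfiguration(
      showNotification: true,
      title: 'Video title',  // placeholder
      author: 'Author name', // placeholder
    ),
  ),
);

// In the widget tree:
// BetterPlayer(controller: controller)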
Upvotes: 0
Reputation: 9019
Unfortunately, Flutter's video_player package doesn't support background video or audio playback. But you can use flutter_playout, which wraps ExoPlayer on Android and the AVPlayer framework on iOS, with the ability to play back video in the background or even on the lock screen. You can find out more about it here. Below is an example from the library's GitHub repo (written before Dart null safety) that plays a video and keeps it playing in the background:
import 'dart:io';

import 'package:flutter/material.dart';
import 'package:flutter_playout/multiaudio/HLSManifestLanguage.dart';
import 'package:flutter_playout/multiaudio/MultiAudioSupport.dart';
import 'package:flutter_playout/player_observer.dart';
import 'package:flutter_playout/player_state.dart';
import 'package:flutter_playout/video.dart';
import 'package:flutter_playout_example/hls/getManifestLanguages.dart';

class VideoPlayout extends StatefulWidget {
  final PlayerState desiredState;
  final bool showPlayerControls;

  const VideoPlayout({Key key, this.desiredState, this.showPlayerControls})
      : super(key: key);

  @override
  _VideoPlayoutState createState() => _VideoPlayoutState();
}
class _VideoPlayoutState extends State<VideoPlayout>
    with PlayerObserver, MultiAudioSupport {
  // Set this to your stream URL (left null here as a placeholder).
  final String _url = null;
  List<HLSManifestLanguage> _hlsLanguages = List<HLSManifestLanguage>();

  @override
  void initState() {
    super.initState();
    Future.delayed(Duration.zero, _getHLSManifestLanguages);
  }

  Future<void> _getHLSManifestLanguages() async {
    if (!Platform.isIOS && _url != null && _url.isNotEmpty) {
      _hlsLanguages = await getManifestLanguages(_url);
      setState(() {});
    }
  }
  @override
  Widget build(BuildContext context) {
    return Container(
      child: Column(
        children: <Widget>[
          /* player */
          AspectRatio(
            aspectRatio: 16 / 9,
            child: Video(
              autoPlay: true,
              showControls: widget.showPlayerControls,
              title: "MTA International",
              subtitle: "Reaching The Corners Of The Earth",
              preferredAudioLanguage: "eng",
              isLiveStream: true,
              position: 0,
              url: _url,
              onViewCreated: _onViewCreated,
              desiredState: widget.desiredState,
            ),
          ),
          /* multi language menu */
          _hlsLanguages.length < 2 && !Platform.isIOS
              ? Container()
              : Container(
                  child: Row(
                    children: _hlsLanguages
                        .map((e) => MaterialButton(
                              child: Text(
                                e.name,
                                style: Theme.of(context)
                                    .textTheme
                                    .button
                                    .copyWith(color: Colors.white),
                              ),
                              onPressed: () {
                                setPreferredAudioLanguage(e.code);
                              },
                            ))
                        .toList(),
                  ),
                ),
        ],
      ),
    );
  }

  void _onViewCreated(int viewId) {
    listenForVideoPlayerEvents(viewId);
    enableMultiAudioSupport(viewId);
  }
  @override
  void onPlay() {
    // Called when playback starts.
    super.onPlay();
  }

  @override
  void onPause() {
    // Called when playback is paused.
    super.onPause();
  }

  @override
  void onComplete() {
    // Called when playback reaches the end of the stream.
    super.onComplete();
  }

  @override
  void onTime(int position) {
    // Called periodically with the current playback position.
    super.onTime(position);
  }

  @override
  void onSeek(int position, double offset) {
    // Called when the user seeks.
    super.onSeek(position, offset);
  }

  @override
  void onDuration(int duration) {
    // Called when the duration of the stream becomes known.
    super.onDuration(duration);
  }

  @override
  void onError(String error) {
    // Called when the player reports an error.
    super.onError(error);
  }
}
Upvotes: 8