Reputation: 121
I am developing simple app in SwiftUI for one internet radio. It uses AVPlayer for play the stream available at given url. And that works perfectly. I have also set up AVSession in AppDelegate, so the app plays in background, stops playing while the call is incoming and resumes playing after the call. This all works fine. However, I wasn't able neither to bring the remote control on lock screen nor showing app in Player tile in Control Center.
The app is written using SwiftUI, I am also moving from traditional completion blocks and targets into Combine. I have created separate class Player, which is ObservableObject (and observed by ContentView), where I set up AVPlayer, AVPlayerItem (with given URL for stream). And all works fine. App updates the state on change of player state. I am not using AVPlayerViewController, since I don't need one. On initialization of that Player object I am also setting up Remote Transport Controls using this method (I moved from setting targets to publishers).
func setupRemoteTransportControls() {
let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.publisher(for: \.playCommand)
.sink(receiveValue: {_ in self.play() })
.store(in: &cancellables)
commandCenter.publisher(for: \.stopCommand)
.sink(receiveValue: {_ in self.stop() })
.store(in: &cancellables)
}
Either I use the original version of that method provided by Apple, or my own version (as shown above) the Remote Control doesn't show up, and the Control Center tile player is not updated.
Of course I use the method provided by Apple for updating NowPlaying
func setupNowPlaying() {
var nowPlayingInfo = [String : Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = "Radio"
if let image = UIImage(systemName: "radio") {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = player?.currentItem?.currentTime().seconds ?? ""
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = player?.currentItem?.asset.duration.seconds ?? ""
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = isPlaying ? 1 : 0
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
I don't know where is the problem. Is it the way I set up Remote Transport Controls? The flow is like this:
Observable Player object with AVPlayer and setup for Remote Transport Controls and NowPlaying -> observed by -> Content View.
Here is full listing for Player class:
import Foundation
import AVKit
import Combine
import MediaPlayer
class Player: ObservableObject {
private let streamURL = URL(string: "https://stream.rcs.revma.com/ypqt40u0x1zuv")!
@Published var status: Player.Status = .stopped
@Published var isPlaying = false
@Published var showError = false
@Published var isMuted = false
var player: AVPlayer?
var cancellables = Set<AnyCancellable>()
init() {
setupRemoteTransportControls()
}
func setupPlayer() {
let item = AVPlayerItem(url: streamURL)
player = AVPlayer(playerItem: item)
player?.allowsExternalPlayback = true
}
func play() {
handleInterruption()
handleRouteChange()
setupPlayer()
player?.play()
player?.currentItem?.publisher(for: \.status)
.sink(receiveValue: { status in
self.handle(status: status)
})
.store(in: &cancellables)
}
func stop() {
player?.pause()
player = nil
status = .stopped
}
func mute() {
player?.isMuted.toggle()
isMuted.toggle()
}
func handle(status: AVPlayerItem.Status) {
switch status {
case .unknown:
self.status = .waiting
self.isPlaying = false
case .readyToPlay:
self.status = .ready
self.isPlaying = true
self.setupNowPlaying()
case .failed:
self.status = .failed
self.isPlaying = false
self.showError = true
self.setupNowPlaying()
default:
self.status = .stopped
self.isPlaying = false
self.setupNowPlaying()
}
}
func handleInterruption() {
NotificationCenter.default.publisher(for: AVAudioSession.interruptionNotification)
.map(\.userInfo)
.compactMap {
$0?[AVAudioSessionInterruptionTypeKey] as? UInt
}
.map { AVAudioSession.InterruptionType(rawValue: $0)}
.sink { (interruptionType) in
self.handle(interruptionType: interruptionType)
}
.store(in: &cancellables)
}
func handle(interruptionType: AVAudioSession.InterruptionType?) {
switch interruptionType {
case .began:
self.stop()
case .ended:
self.play()
default:
break
}
}
typealias UInfo = [AnyHashable: Any]
func handleRouteChange() {
NotificationCenter.default.publisher(for: AVAudioSession.routeChangeNotification)
.map(\.userInfo)
.compactMap({ (userInfo) -> (UInfo?, UInt?) in
(userInfo, userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt)
})
.compactMap({ (result) -> (UInfo?, AVAudioSession.RouteChangeReason?) in
(result.0, AVAudioSession.RouteChangeReason(rawValue: result.1 ?? 0))
})
.sink(receiveValue: { (result) in
self.handle(reason: result.1, userInfo: result.0)
})
.store(in: &cancellables)
}
func handle(reason: AVAudioSession.RouteChangeReason?, userInfo: UInfo?) {
switch reason {
case .newDeviceAvailable:
let session = AVAudioSession.sharedInstance()
for output in session.currentRoute.outputs where output.portType == AVAudioSession.Port.headphones {
DispatchQueue.main.async {
self.play()
}
}
case .oldDeviceUnavailable:
if let previousRoute = userInfo?[AVAudioSessionRouteChangePreviousRouteKey] as? AVAudioSessionRouteDescription {
for output in previousRoute.outputs where output.portType == AVAudioSession.Port.headphones {
DispatchQueue.main.sync {
self.stop()
}
break
}
}
default:
break
}
}
}
extension Player {
enum Status {
case waiting, ready, failed, stopped
}
}
extension Player {
func setupRemoteTransportControls() {
let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.publisher(for: \.playCommand)
.sink(receiveValue: {_ in self.play() })
.store(in: &cancellables)
commandCenter.publisher(for: \.stopCommand)
.sink(receiveValue: {_ in self.stop() })
.store(in: &cancellables)
}
func setupNowPlaying() {
var nowPlayingInfo = [String : Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = "Radio"
if let image = UIImage(systemName: "radio") {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = player?.currentItem?.currentTime().seconds ?? ""
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = player?.currentItem?.asset.duration.seconds ?? ""
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = isPlaying ? 1 : 0
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
}
Upvotes: 0
Views: 784
Reputation: 121
It appeared that I need to add in AppDelegate in application(:didFinishLaunchWithOptions) one line of code:
UIApplication.shared.beginReceivingRemoteControlEvents()
That solved the problem. Now remote controller is visible on lock screen and it also works in Control Center.
Once additional fix. Changing targets to publisher in setupRemoteTransportControls() in my Player object didn't work. So I switched back to setting targets like this.
func setupRemoteTransportControls() {
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { event in
self.play()
return .success
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { event in
self.stop()
return .success
}
}
Upvotes: 0