Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix(ios): fix external text tracks crashes with m3u8 files #3330

Merged
merged 4 commits into from
Nov 17, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs/pages/component/props.md
Original file line number Diff line number Diff line change
Expand Up @@ -708,6 +708,7 @@ subtitleStyle={{ paddingBottom: 50, fontSize: 20 }}

### `textTracks`
Load one or more "sidecar" text tracks. This takes an array of objects representing each track. Each object should have the format:
> ⚠️ This feature does not work with HLS playlists (e.g. m3u8) on iOS.

Property | Description
--- | ---
Expand Down
37 changes: 18 additions & 19 deletions ios/Video/Features/RCTVideoUtils.swift
Original file line number Diff line number Diff line change
Expand Up @@ -184,24 +184,24 @@ enum RCTVideoUtils {
let mixComposition:AVMutableComposition = AVMutableComposition()

let videoAsset:AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first
let videoCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID:kCMPersistentTrackID_Invalid)
do {
try videoCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
of: videoAsset,
at: .zero)
} catch {

// videoAsset must be non-nil so we can read its duration later
if videoAsset == nil {
return mixComposition
}

let videoCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID:kCMPersistentTrackID_Invalid)
try? videoCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
of: videoAsset,
at: .zero)

let audioAsset:AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.audio).first
let audioCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID:kCMPersistentTrackID_Invalid)
do {
try audioCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
of: audioAsset,
at: .zero)
} catch {
}
try? audioCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration),
of: audioAsset,
at: .zero)

return mixComposition
}
Expand All @@ -226,12 +226,11 @@ enum RCTVideoUtils {
validTextTracks.append(textTracks[i])
let textCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text,
preferredTrackID:kCMPersistentTrackID_Invalid)
do {
try textCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
if videoAsset != nil {
try? textCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset!.timeRange.duration),
of: textTrackAsset,
at: .zero)
} catch {
}
}
}
Expand Down Expand Up @@ -322,7 +321,7 @@ enum RCTVideoUtils {
}

static func createMetadataItem(for identifier: AVMetadataIdentifier,
value: Any) -> AVMetadataItem {
value: Any) -> AVMetadataItem {
let item = AVMutableMetadataItem()
item.identifier = identifier
item.value = value as? NSCopying & NSObjectProtocol
Expand Down
93 changes: 45 additions & 48 deletions ios/Video/RCTVideo.swift
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
@objc var onRestoreUserInterfaceForPictureInPictureStop: RCTDirectEventBlock?
@objc var onGetLicense: RCTDirectEventBlock?
@objc var onReceiveAdEvent: RCTDirectEventBlock?

@objc func _onPictureInPictureStatusChanged() {
onPictureInPictureStatusChanged?([ "isActive": NSNumber(value: true)])
}
Expand Down Expand Up @@ -200,7 +200,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
_player?.pause()
_player?.rate = 0.0
}

@objc func applicationDidBecomeActive(notification: NSNotification!) {
if _playInBackground || _playWhenInactive || _paused { return }

Expand Down Expand Up @@ -305,14 +305,14 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
if let uri = source.uri, uri.starts(with: "ph://") {
return Promise {
RCTVideoUtils.preparePHAsset(uri: uri).then { asset in
return self.playerItemPrepareText(asset:asset, assetOptions:nil)
return self.playerItemPrepareText(asset:asset, assetOptions:nil, uri: source.uri ?? "")
}
}
}
guard let assetResult = RCTVideoUtils.prepareAsset(source: source),
let asset = assetResult.asset,
let assetOptions = assetResult.assetOptions else {
DebugLog("Could not find video URL in source '\(self._source)'")
let asset = assetResult.asset,
let assetOptions = assetResult.assetOptions else {
DebugLog("Could not find video URL in source '\(String(describing: self._source))'")
throw NSError(domain: "", code: 0, userInfo: nil)
}

Expand All @@ -332,7 +332,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
reactTag: self.reactTag
)
}
return Promise{self.playerItemPrepareText(asset: asset, assetOptions:assetOptions)}

return Promise{self.playerItemPrepareText(asset: asset, assetOptions:assetOptions, uri: source.uri ?? "")}
}.then{[weak self] (playerItem:AVPlayerItem!) in
guard let self = self else {throw NSError(domain: "", code: 0, userInfo: nil)}

Expand Down Expand Up @@ -390,8 +391,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
_localSourceEncryptionKeyScheme = keyScheme
}

func playerItemPrepareText(asset:AVAsset!, assetOptions:NSDictionary?) -> AVPlayerItem {
if (_textTracks == nil) || _textTracks?.count==0 {
func playerItemPrepareText(asset:AVAsset!, assetOptions:NSDictionary?, uri: String) -> AVPlayerItem {
if (_textTracks == nil) || _textTracks?.count==0 || (uri.hasSuffix(".m3u8")) {
return self.playerItemPropegateMetadata(AVPlayerItem(asset: asset))
}

Expand All @@ -409,37 +410,37 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH

return self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition))
}

func playerItemPropegateMetadata(_ playerItem: AVPlayerItem!) -> AVPlayerItem {
var mapping: [AVMetadataIdentifier: Any] = [:]

if let title = _source?.title {
mapping[.commonIdentifierTitle] = title
}

if let subtitle = _source?.subtitle {
mapping[.iTunesMetadataTrackSubTitle] = subtitle
}

if let description = _source?.description {
mapping[.commonIdentifierDescription] = description
}
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There again spacing change.
But I accept these one 😆


if let customImageUri = _source?.customImageUri,
let imageData = RCTVideoUtils.createImageMetadataItem(imageUri: customImageUri) {
mapping[.commonIdentifierArtwork] = imageData
}

if #available(iOS 12.2, *), !mapping.isEmpty {
playerItem.externalMetadata = RCTVideoUtils.createMetadataItems(for: mapping)
}
#if os(tvOS)

#if os(tvOS)
if let chapters = _chapters {
playerItem.navigationMarkerGroups = RCTVideoTVUtils.makeNavigationMarkerGroups(chapters)
}
#endif
#endif

return playerItem
}

Expand All @@ -448,7 +449,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
@objc
func setResizeMode(_ mode: String) {
var resizeMode: AVLayerVideoGravity = .resizeAspect

switch mode {
case "contain":
resizeMode = .resizeAspect
Expand All @@ -465,13 +466,13 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
default:
resizeMode = .resizeAspect
}

if _controls {
_playerViewController?.videoGravity = resizeMode
} else {
_playerLayer?.videoGravity = resizeMode
}

_resizeMode = mode
}

Expand Down Expand Up @@ -585,17 +586,17 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
paused:wasPaused,
seekTime:seekTime.floatValue,
seekTolerance:seekTolerance.floatValue)
.then{ [weak self] (finished:Bool) in
guard let self = self else { return }
.then{ [weak self] (finished:Bool) in
guard let self = self else { return }

self._playerObserver.addTimeObserverIfNotSet()
if !wasPaused {
self.setPaused(false)
}
self.onVideoSeek?(["currentTime": NSNumber(value: Float(CMTimeGetSeconds(item.currentTime()))),
"seekTime": seekTime,
"target": self.reactTag])
}.catch{_ in }
self._playerObserver.addTimeObserverIfNotSet()
if !wasPaused {
self.setPaused(false)
}
self.onVideoSeek?(["currentTime": NSNumber(value: Float(CMTimeGetSeconds(item.currentTime()))),
"seekTime": seekTime,
"target": self.reactTag])
}.catch{_ in }

_pendingSeek = false
}
Expand Down Expand Up @@ -623,9 +624,9 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
RCTPlayerOperations.configureAudio(ignoreSilentSwitch:_ignoreSilentSwitch, mixWithOthers:_mixWithOthers, audioOutput:_audioOutput)
do {
if audioOutput == "speaker" {
#if os(iOS)
#if os(iOS)
try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)
#endif
#endif
} else if audioOutput == "earpiece" {
try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.none)
}
Expand Down Expand Up @@ -678,10 +679,9 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
}
}


func applyModifiers() {
if let video = _player?.currentItem,
video == nil || video.status != AVPlayerItem.Status.readyToPlay {
video == nil || video.status != AVPlayerItem.Status.readyToPlay {
return
}
if _muted {
Expand Down Expand Up @@ -719,8 +719,6 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
_repeat = `repeat`
}



@objc
func setSelectedAudioTrack(_ selectedAudioTrack:NSDictionary?) {
setSelectedAudioTrack(SelectedTrackCriteria(selectedAudioTrack))
Expand Down Expand Up @@ -758,7 +756,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
// in case textTracks was set after selectedTextTrack
if (_selectedTextTrackCriteria != nil) {setSelectedTextTrack(_selectedTextTrackCriteria)}
}

@objc
func setChapters(_ chapters:[NSDictionary]?) {
setChapters(chapters?.map { Chapter($0) })
Expand Down Expand Up @@ -934,7 +932,6 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
}
}


func videoPlayerViewControllerDidDismiss(playerViewController:AVPlayerViewController) {
if _playerViewController == playerViewController && _fullscreenPlayerPresented {
_fullscreenPlayerPresented = false
Expand Down Expand Up @@ -1236,31 +1233,31 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH

func handlePlaybackRateChange(player: AVPlayer, change: NSKeyValueObservedChange<Float>) {
    // KVO handler for AVPlayer.rate. Emits JS events for rate changes,
    // play/pause state, and resumption after a stall.
    guard let _player = _player else { return }

    // Skip redundant KVO callbacks where the rate did not actually change.
    if player.rate == change.oldValue && change.oldValue != nil {
        return
    }

    onPlaybackRateChange?(["playbackRate": NSNumber(value: _player.rate),
                           "target": reactTag as Any])

    onVideoPlaybackStateChanged?(["isPlaying": _player.rate != 0,
                                  "target": reactTag as Any])

    // A positive rate arriving after a stall means playback has resumed.
    if _playbackStalled && _player.rate > 0 {
        onPlaybackResume?(["playbackRate": NSNumber(value: _player.rate),
                           "target": reactTag as Any])
        _playbackStalled = false
    }
}

func handleVolumeChange(player: AVPlayer, change: NSKeyValueObservedChange<Float>) {
    // KVO handler for AVPlayer.volume. Forwards volume changes to JS.
    guard let _player = _player else { return }

    // Bug fix: this observer fires for *volume* changes, so the redundancy
    // guard must compare the old value against player.volume. The previous
    // check compared player.rate (copy-pasted from handlePlaybackRateChange),
    // which could wrongly suppress or duplicate onVolumeChange events.
    if player.volume == change.oldValue && change.oldValue != nil {
        return
    }

    onVolumeChange?(["volume": NSNumber(value: _player.volume),
                     "target": reactTag as Any])
}
Expand Down
6 changes: 3 additions & 3 deletions ios/VideoCaching/RCTVideoCachingHandler.swift
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import Promises
class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {

private var _videoCache:RCTVideoCache! = RCTVideoCache.sharedInstance()
var playerItemPrepareText: ((AVAsset?, NSDictionary?) -> AVPlayerItem)?
var playerItemPrepareText: ((AVAsset?, NSDictionary?) -> AVPlayerItem, uri: String)?

override init() {
super.init()
Expand All @@ -33,12 +33,12 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
case .missingFileExtension:
DebugLog("Could not generate cache key for uri '\(uri)'. It is currently not supported to cache urls that do not include a file extension. The video file will not be cached. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md")
let asset:AVURLAsset! = AVURLAsset(url: url!, options:options as! [String : Any])
return playerItemPrepareText(asset, options)
return playerItemPrepareText(asset, options, "")

case .unsupportedFileExtension:
DebugLog("Could not generate cache key for uri '\(uri)'. The file extension of that uri is currently not supported. The video file will not be cached. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md")
let asset:AVURLAsset! = AVURLAsset(url: url!, options:options as! [String : Any])
return playerItemPrepareText(asset, options)
return playerItemPrepareText(asset, options, "")

default:
if let cachedAsset = cachedAsset {
Expand Down