AndrewBoryk / ABMediaView

Media view which subclasses UIImageView and can display and load images, videos, GIFs, and audio from the web, with functionality to minimize from fullscreen, as well as show GIF previews for videos.
MIT License

Loop specific range #2

Open mehdok opened 7 years ago

mehdok commented 7 years ago

Is it possible to loop a specific video range, e.g. 0...10 seconds?

AndrewBoryk commented 7 years ago

Hmm it is certainly possible. Could you elaborate on a possible use-case for this?

mehdok commented 7 years ago

It may not be a common usage, but I have a list of online videos that must play in a loop (from 0 to 10 seconds). I can load the video as a stream and, when playback hits that time, just seek to zero and play it again, but that is a huge waste of resources: suppose the video is 300 seconds and I only want the first 10 seconds; loading the stream will still fetch all 300 seconds. So I'm looking for an efficient way to do this. Any help or suggestion would be appreciated. Thanks in advance.
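For reference, the "seek back to zero" approach looks roughly like the sketch below (this is not part of ABMediaView; `NaiveRangeLooper` and the 10-second default are just placeholder names). `forwardPlaybackEndTime` ends playback at the range boundary, and `preferredForwardBufferDuration` hints the player not to buffer far past it, but that is only a hint, so the full stream may still be downloaded:

```swift
import AVFoundation

/// A sketch of the naive range loop: play [0, rangeEnd) and restart when the end is hit.
final class NaiveRangeLooper {

    private let player: AVPlayer
    private var endObserver: NSObjectProtocol?

    init(url: URL, rangeEnd: TimeInterval = 10) {
        let item = AVPlayerItem(url: url)
        // Stop playback at the end of the range instead of the end of the asset...
        item.forwardPlaybackEndTime = CMTime(seconds: rangeEnd, preferredTimescale: 600)
        // ...and ask the player not to buffer much further ahead than the range (a hint only).
        item.preferredForwardBufferDuration = rangeEnd

        player = AVPlayer(playerItem: item)

        // forwardPlaybackEndTime makes the item post "did play to end" at rangeEnd,
        // so the range is restarted from there.
        endObserver = NotificationCenter.default.addObserver(
            forName: .AVPlayerItemDidPlayToEndTime,
            object: item,
            queue: .main) { [weak self] _ in
                self?.player.seek(to: kCMTimeZero)
                self?.player.play()
        }
    }

    func play() { player.play() }

    deinit {
        if let endObserver = endObserver {
            NotificationCenter.default.removeObserver(endObserver)
        }
    }
}
```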

AndrewBoryk commented 7 years ago

I see what you mean now. I'm going to look into the best possible way to implement this. If you come up with something in the next few days, let me know.

Also, if it seems like I haven't added much to this library in the past couple of months, it is because I have been working on a Swift version.

mehdok commented 7 years ago

Based on this answer, I have created my own looper that does exactly what I want (loop through 6 seconds of video without loading all the stream data), but there are two problems: first, the video has no sound :), and second, the preload functionality is missing. Any idea to solve these would be appreciated.

```swift
// RangeLooper.swift

import AVFoundation

class RangeLooper: NSObject {

// MARK: Types

private struct ObserverContexts {
    static var playerStatus = 0

    static var playerStatusKey = "status"

    static var currentItem = 0

    static var currentItemKey = "currentItem"

    static var currentItemStatus = 0

    static var currentItemStatusKey = "currentItem.status"

    static var urlAssetDurationKey = "duration"

    static var urlAssetPlayableKey = "playable"
}

// MARK: Properties

private var player: AVQueuePlayer?

private var playerLayer: AVPlayerLayer?

private var isObserving = false

private var numberOfTimesPlayed = 0

private let numberOfTimesToPlay: Int

private let videoURL: URL

// MARK: Looper

required init(videoURL: URL, loopCount: Int) {
    self.videoURL = videoURL
    self.numberOfTimesToPlay = loopCount

    super.init()
}

func start(in parentLayer: CALayer) {
    stop()

    player = AVQueuePlayer()
    playerLayer = AVPlayerLayer(player: player)
    playerLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill

    guard let playerLayer = playerLayer else { fatalError("Error creating player layer") }
    playerLayer.frame = parentLayer.bounds
    parentLayer.addSublayer(playerLayer)

    let videoAsset = AVURLAsset(url: videoURL)

    let start = CMTime(seconds: 0, preferredTimescale: 1)
    let end = CMTime(seconds: 6, preferredTimescale: 1)
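    // Note: CMTimeRangeMake takes (start, duration), not (start, end); since start is zero here, the 6-second value serves as both.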
    let timeRange = CMTimeRangeMake(start, end)

    let composition = AVMutableComposition()
    let videoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
    try! videoTrack.insertTimeRange(timeRange, of: videoAsset.tracks(withMediaType: AVMediaTypeVideo).first!, at: CMTimeMake(0, 1))

    videoAsset.loadValuesAsynchronously(forKeys: [ObserverContexts.urlAssetDurationKey, ObserverContexts.urlAssetPlayableKey]) {
        /*
         The asset invokes its completion handler on an arbitrary queue
         when loading is complete. Because we want to access our AVQueuePlayer
         in our ensuing set-up, we must dispatch our handler to the main
         queue.
         */
        DispatchQueue.main.async(execute: {
            var durationError: NSError?
            let durationStatus = videoAsset.statusOfValue(forKey: ObserverContexts.urlAssetDurationKey, error: &durationError)
            guard durationStatus == .loaded else { fatalError("Failed to load duration property with error: \(durationError)") }

            var playableError: NSError?
            let playableStatus = videoAsset.statusOfValue(forKey: ObserverContexts.urlAssetPlayableKey, error: &playableError)
            guard playableStatus == .loaded else { fatalError("Failed to read playable duration property with error: \(playableError)") }

            guard videoAsset.isPlayable else {
                print("Can't loop since asset is not playable")
                return
            }

            guard CMTimeCompare(videoAsset.duration, CMTime(value:1, timescale:100)) >= 0 else {
                print("Can't loop since asset duration too short. Duration is(\(CMTimeGetSeconds(videoAsset.duration)) seconds")
                return
            }

            /*
             Based on the duration of the asset, we decide the number of player
             items to add to demonstrate gapless playback of the same asset.
             */
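            // For any asset longer than a second this evaluates to 2 queued items, which is enough here
            // because the KVO handler below re-appends each finished item to keep the queue full.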
            let numberOfPlayerItems = (Int)(1.0 / CMTimeGetSeconds(videoAsset.duration)) + 2

            for _ in 1...numberOfPlayerItems {
                let loopItem = AVPlayerItem(asset: composition)
                self.player?.insert(loopItem, after: nil)
            }

            self.startObserving()
            self.numberOfTimesPlayed = 0
            self.player?.play()
        })
    }
}

func stop() {
    player?.pause()
    stopObserving()

    player?.removeAllItems()
    player = nil

    playerLayer?.removeFromSuperlayer()
    playerLayer = nil
}

// MARK: Convenience

private func startObserving() {
    guard let player = player, !isObserving else { return }

    player.addObserver(self, forKeyPath: ObserverContexts.playerStatusKey, options: .new, context: &ObserverContexts.playerStatus)
    player.addObserver(self, forKeyPath: ObserverContexts.currentItemKey, options: .old, context: &ObserverContexts.currentItem)
    player.addObserver(self, forKeyPath: ObserverContexts.currentItemStatusKey, options: .new, context: &ObserverContexts.currentItemStatus)

    isObserving = true
}

private func stopObserving() {
    guard let player = player, isObserving else { return }

    player.removeObserver(self, forKeyPath: ObserverContexts.playerStatusKey, context: &ObserverContexts.playerStatus)
    player.removeObserver(self, forKeyPath: ObserverContexts.currentItemKey, context: &ObserverContexts.currentItem)
    player.removeObserver(self, forKeyPath: ObserverContexts.currentItemStatusKey, context: &ObserverContexts.currentItemStatus)

    isObserving = false
}

// MARK: KVO

override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
    if context == &ObserverContexts.playerStatus {
        guard let newPlayerStatus = change?[.newKey] as? AVPlayerStatus else { return }

        if newPlayerStatus == AVPlayerStatus.failed {
            print("End looping since player has failed with error: \(player?.error)")
            stop()
        }
    }
    else if context == &ObserverContexts.currentItem {
        guard let player = player else { return }

        if player.items().isEmpty {
            print("Play queue emptied out due to bad player item. End looping")
            stop()
        }
        else {
            // If `loopCount` has been set, check if looping needs to stop.
            if numberOfTimesToPlay > 0 {
                numberOfTimesPlayed = numberOfTimesPlayed + 1

                if numberOfTimesPlayed >= numberOfTimesToPlay {
                    print("Looped \(numberOfTimesToPlay) times. Stopping.");
                    stop()
                }
            }

            /*
             Append the previous current item to the player's queue. An initial
             change from a nil currentItem yields NSNull here. Check to make
             sure the class is AVPlayerItem before appending it to the end
             of the queue.
             */
            if let itemRemoved = change?[.oldKey] as? AVPlayerItem {
                itemRemoved.seek(to: kCMTimeZero)

                stopObserving()
                player.insert(itemRemoved, after: nil)
                startObserving()
            }
        }
    }
    else if context == &ObserverContexts.currentItemStatus {
        guard let newPlayerItemStatus = change?[.newKey] as? AVPlayerItemStatus else { return }

        if newPlayerItemStatus == .failed {
            print("End looping since player item has failed with error: \(player?.currentItem?.error)")
            stop()
        }
    }
    else {
        super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
    }
}

}
```

mehdok commented 7 years ago

Here is the complete code, with audio working and the preload functionality added; maybe you can use it in your great library.

```swift
// RangeLooper.swift

import AVFoundation
import UIKit

enum PlaybackError: Error {
    case videoTrackIsNil
    case playbackDurationIsNil
    case playableStatusIsNil
    case thumnailNil
}

protocol RangeLooperDelegate: class {
    func onPlayerStatusChanged(status: AVPlayerLooperStatus)
    func thumbnailIsReady(image: UIImage?)
    func playbackStarted()
    func onLoadError(error: PlaybackError)
}

class RangeLooper: NSObject {

// MARK: Types

private struct ObserverContexts {
    static var playerStatus = 0

    static var playerStatusKey = "status"

    static var currentItem = 0

    static var currentItemKey = "currentItem"

    static var currentItemStatus = 0

    static var currentItemStatusKey = "currentItem.status"

    static var urlAssetDurationKey = "duration"

    static var urlAssetPlayableKey = "playable"
}

// MARK: Properties

private var player: AVQueuePlayer?

private var playerLayer: AVPlayerLayer?

private var isObserving = false

private var numberOfTimesPlayed = 0

private let numberOfTimesToPlay: Int

private let videoURL: URL

private var itemReady = false
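// Preload coordination: loadAssetItem() builds the composition as soon as the looper is created;
// if start(in:) is called before it finishes, playback is deferred until loadAssetAsync sets
// `itemReady` and, when `visible` is true, starts playback in `parentLayer`.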
var visible = false
var parentLayer: CALayer!
var numberOfPlayerItems: Int = 1
let composition = AVMutableComposition()

weak var delegate: RangeLooperDelegate? = nil

// MARK: Looper

required init(videoURL: URL, loopCount: Int) {
    self.videoURL = videoURL
    self.numberOfTimesToPlay = loopCount

    super.init()

    loadAssetItem()
}

func loadAssetItem() {
    DispatchQueue.global().async {
        let videoAsset = AVURLAsset(url: self.videoURL)

        self.sendThumbnail(videoAsset: videoAsset)

        let start = CMTime(seconds: 0, preferredTimescale: 1)
        let end = CMTime(seconds: 6, preferredTimescale: 1)
        let timeRange = CMTimeRangeMake(start, end)

        let videoTrack: AVMutableCompositionTrack = self.composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

        let audioTrack: AVMutableCompositionTrack = self.composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

        do {
            let track = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first
            let audio = videoAsset.tracks(withMediaType: AVMediaTypeAudio).first

            guard let _ = track else {
                throw "video track is nil"
            }

            guard let _ = audio else {
                throw "audio track is nil"
            }

            try videoTrack.insertTimeRange(timeRange, of: track!, at: CMTimeMake(0, 1))
            try audioTrack.insertTimeRange(timeRange, of: audio!, at: CMTimeMake(0, 1))
        } catch _ {
            self.delegate?.onLoadError(error: .videoTrackIsNil)
            return
        }

        print("composition created")
        DispatchQueue.main.async {
            self.loadAssetAsync(videoAsset: videoAsset, composition: self.composition)
        }
    }
}

func sendThumbnail(videoAsset: AVURLAsset) {
    do {
        let imageGenerator = AVAssetImageGenerator(asset: videoAsset)
        let time = CMTimeMake(1, 1)
        let imageRef = try imageGenerator.copyCGImage(at: time, actualTime: nil)
        let thumbnail = UIImage(cgImage: imageRef)

        DispatchQueue.main.async {
            self.delegate?.thumbnailIsReady(image: thumbnail)
        }
    } catch _ {
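        // Unlike the success path above, this error callback is not dispatched back to the main queue.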
        self.delegate?.onLoadError(error: .thumnailNil)
    }
}

func loadAssetAsync(videoAsset: AVURLAsset, composition: AVMutableComposition) {
    videoAsset.loadValuesAsynchronously(forKeys: [ObserverContexts.urlAssetDurationKey, ObserverContexts.urlAssetPlayableKey]) {
        DispatchQueue.main.async(execute: {
            var durationError: NSError?
            let durationStatus = videoAsset.statusOfValue(forKey: ObserverContexts.urlAssetDurationKey, error: &durationError)
            guard durationStatus == .loaded else {
                self.delegate?.onLoadError(error: .playbackDurationIsNil)
                return
            }

            var playableError: NSError?
            let playableStatus = videoAsset.statusOfValue(forKey: ObserverContexts.urlAssetPlayableKey, error: &playableError)
            guard playableStatus == .loaded else {
                self.delegate?.onLoadError(error: .playableStatusIsNil)
                return
            }

            guard videoAsset.isPlayable else {
                print("Can't loop since asset is not playable")
                return
            }

            guard CMTimeCompare(videoAsset.duration, CMTime(value:1, timescale:100)) >= 0 else {
                print("Can't loop since asset duration too short. Duration is(\(CMTimeGetSeconds(videoAsset.duration)) seconds")
                return
            }

            print("asset loaded")

            self.numberOfPlayerItems = (Int)(1.0 / CMTimeGetSeconds(videoAsset.duration)) + 2

            print("numberOfPlayerItems: ", self.numberOfPlayerItems)

            self.itemReady = true

            if self.visible {
                self.start(in: self.parentLayer)
            }
        })
    }
}

func start(in parentLayer: CALayer) {
    print("start playback")

    self.parentLayer = parentLayer

    if !itemReady {
        return
    }

    stop()

    player = AVQueuePlayer()
    playerLayer = AVPlayerLayer(player: player)
    playerLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill

    guard let playerLayer = playerLayer else { fatalError("Error creating player layer") }
    playerLayer.frame = parentLayer.bounds
    parentLayer.addSublayer(playerLayer)

    for _ in 1...numberOfPlayerItems {
        let loopItem = AVPlayerItem(asset: composition)
        self.player?.insert(loopItem, after: nil)
    }

    self.startObserving()
    self.numberOfTimesPlayed = 0
    self.player?.play()

    delegate?.playbackStarted()
}

func stop() {
    player?.pause()
    stopObserving()

    player?.removeAllItems()
    player = nil

    playerLayer?.removeFromSuperlayer()
    playerLayer = nil
}

// MARK: Convenience

private func startObserving() {
    guard let player = player, !isObserving else { return }

    player.addObserver(self, forKeyPath: ObserverContexts.playerStatusKey, options: .new, context: &ObserverContexts.playerStatus)
    player.addObserver(self, forKeyPath: ObserverContexts.currentItemKey, options: .old, context: &ObserverContexts.currentItem)
    player.addObserver(self, forKeyPath: ObserverContexts.currentItemStatusKey, options: .new, context: &ObserverContexts.currentItemStatus)

    isObserving = true
}

private func stopObserving() {
    guard let player = player, isObserving else { return }

    player.removeObserver(self, forKeyPath: ObserverContexts.playerStatusKey, context: &ObserverContexts.playerStatus)
    player.removeObserver(self, forKeyPath: ObserverContexts.currentItemKey, context: &ObserverContexts.currentItem)
    player.removeObserver(self, forKeyPath: ObserverContexts.currentItemStatusKey, context: &ObserverContexts.currentItemStatus)

    isObserving = false
}

// MARK: KVO

override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
    if context == &ObserverContexts.playerStatus {
        guard let newPlayerStatus = change?[.newKey] as? AVPlayerStatus else { return }

        if newPlayerStatus == AVPlayerStatus.failed {
            print("End looping since player has failed with error: \(player?.error)")
            stop()
        }
    }
    else if context == &ObserverContexts.currentItem {
        guard let player = player else { return }

        if player.items().isEmpty {
            print("Play queue emptied out due to bad player item. End looping")
            stop()
        }
        else {
            // If `loopCount` has been set, check if looping needs to stop.
            if numberOfTimesToPlay > 0 {
                numberOfTimesPlayed = numberOfTimesPlayed + 1

                if numberOfTimesPlayed >= numberOfTimesToPlay {
                    print("Looped \(numberOfTimesToPlay) times. Stopping.");
                    stop()
                }
            }

            /*
             Append the previous current item to the player's queue. An initial
             change from a nil currentItem yields NSNull here. Check to make
             sure the class is AVPlayerItem before appending it to the end
             of the queue.
             */
            if let itemRemoved = change?[.oldKey] as? AVPlayerItem {
                itemRemoved.seek(to: kCMTimeZero)

                stopObserving()
                player.insert(itemRemoved, after: nil)
                startObserving()
            }
        }
    }
    else if context == &ObserverContexts.currentItemStatus {
        guard let newPlayerItemStatus = change?[.newKey] as? AVPlayerItemStatus else { return }

        if newPlayerItemStatus == .failed {
            print("End looping since player item has failed with error: \(player?.currentItem?.error)")
            stop()
        }
    }
    else {
        super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
    }
}

}

extension String: Error {
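    // Lets plain strings be thrown as errors, as done in loadAssetItem() above.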

}
```
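A minimal sketch of how this looper might be driven (the host view controller and URL here are placeholders, not part of ABMediaView):

```swift
import AVFoundation
import UIKit

// Hypothetical host; the delegate methods match the RangeLooperDelegate protocol above.
class PlayerViewController: UIViewController, RangeLooperDelegate {

    private var looper: RangeLooper?

    override func viewDidLoad() {
        super.viewDidLoad()

        // Creating the looper immediately kicks off loadAssetItem(), i.e. the preload.
        let url = URL(string: "https://example.com/video.mp4")!
        looper = RangeLooper(videoURL: url, loopCount: 0) // 0 loops indefinitely
        looper?.delegate = self
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        looper?.visible = true
        looper?.start(in: view.layer) // defers playback until the composition is ready
    }

    override func viewDidDisappear(_ animated: Bool) {
        super.viewDidDisappear(animated)
        looper?.visible = false
        looper?.stop()
    }

    // MARK: RangeLooperDelegate

    func onPlayerStatusChanged(status: AVPlayerLooperStatus) { }

    func thumbnailIsReady(image: UIImage?) {
        // Could be shown as a poster frame while the composition loads.
    }

    func playbackStarted() { }

    func onLoadError(error: PlaybackError) {
        print("Range loop failed: \(error)")
    }
}
```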