Video built from collected images takes a long time before it plays

Problem description

I am trying to build a video editor, in which I have done the following:

1) Collect the photos from the user's gallery.

2) Convert the array of photos into a video.

3) Add animations to the video.

4) Play the video.

The code I wrote for each step is as follows:

1) Collecting the photos from the user's gallery:

    func openImagePicker() {
        let customColor = UIColor(red: 64.0/255.0, green: 0.0, blue: 144.0/255.0, alpha: 1.0)
        let customCameraColor = UIColor(red: 86.0/255.0, green: 1.0/255.0, blue: 236.0/255.0, alpha: 1.0)

        pickerViewController.numberOfPhotoToSelect = 5
        pickerViewController.theme.titleLabelTextColor = UIColor.white
        pickerViewController.theme.navigationBarBackgroundColor = customColor
        pickerViewController.theme.tintColor = UIColor.white
        pickerViewController.theme.orderTintColor = customCameraColor
        pickerViewController.theme.cameraVeilColor = customCameraColor
        pickerViewController.theme.cameraIconColor = UIColor.white
        pickerViewController.theme.statusBarStyle = .lightContent
        self.yms_presentCustomAlbumPhotoView(pickerViewController, delegate: self)
    }

    func photoPickerViewController(_ picker: YMSPhotoPickerViewController!, didFinishPickingImages photoAssets: [PHAsset]!) {
        picker.dismiss(animated: true) {
            self.selectedImageArray = NSMutableArray()
            let imageManager = PHImageManager()
            let options = PHImageRequestOptions()
            options.deliveryMode = .highQualityFormat
            options.resizeMode = .exact
            // With isSynchronous = true the result handler runs before
            // requestImage returns, so the array is fully populated below --
            // but every request blocks the main thread.
            options.isSynchronous = true
            for asset in photoAssets {
                let targetSize = CGSize(width: self.view.frame.size.width,
                                        height: self.view.frame.size.width)
                imageManager.requestImage(for: asset, targetSize: targetSize, contentMode: .aspectFill, options: options, resultHandler: { (image, info) in
                    if let image = image {
                        self.selectedImageArray.add(image)
                    }
                })
            }

            let imageVideaMakerController = self.storyboard?.instantiateViewController(withIdentifier: "VideoEditorController") as! VideoEditorController
            imageVideaMakerController.selectedImageArray = self.selectedImageArray as! [UIImage]
            self.navigationController!.pushViewController(imageVideaMakerController, animated: true)
        }
    }
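
Note: because options.isSynchronous = true makes every requestImage call block, and the loop above runs in the picker's dismiss completion on the main thread, one thing I have considered is moving the requests onto a background queue, roughly like the following untested sketch (loadImages is a hypothetical helper, not part of my current code):

    import Photos
    import UIKit

    // Hypothetical helper: run the synchronous PHImageManager requests on a
    // background queue, then hand the finished array back on the main queue.
    func loadImages(for assets: [PHAsset], targetSize: CGSize,
                    completion: @escaping ([UIImage]) -> Void) {
        let options = PHImageRequestOptions()
        options.deliveryMode = .highQualityFormat
        options.resizeMode = .exact
        options.isSynchronous = true // blocking is fine off the main thread

        DispatchQueue.global(qos: .userInitiated).async {
            var images = [UIImage]()
            for asset in assets {
                PHImageManager.default().requestImage(for: asset,
                                                      targetSize: targetSize,
                                                      contentMode: .aspectFill,
                                                      options: options) { image, _ in
                    if let image = image { images.append(image) }
                }
            }
            DispatchQueue.main.async { completion(images) }
        }
    }

The picker delegate would then call loadImages and push VideoEditorController from its completion instead of looping inline.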

2) Converting the array of photos into a video:

override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        self.navigationController?.navigationBar.isHidden = false

        setUpInitialView()
        collectionView.reloadData()
    }

    // MARK: - Custom Methods
    func setUpInitialView() {
        let loadingNotification = MBProgressHUD.showAdded(to: view, animated: true)
        loadingNotification.mode = MBProgressHUDMode.indeterminate
        loadingNotification.label.text = "Loading"

        // Kicks off the writer; the HUD stays up until the export finishes.
        buildVideoFromImageArray()
        //filterScrollContents()
    }

func buildVideoFromImageArray() {

        imageArrayToVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/video1.MP4")
        removeFileAtURLIfExists(url: imageArrayToVideoURL)
        guard let videoWriter = try? AVAssetWriter(outputURL: imageArrayToVideoURL as URL, fileType: AVFileType.mp4) else {
            fatalError("AVAssetWriter error")
        }
        let outputSettings = [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : NSNumber(value: Float(outputSize.width)), AVVideoHeightKey : NSNumber(value: Float(outputSize.height))] as [String : Any]
        guard videoWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaType.video) else {
            fatalError("Negative : Can't apply the Output settings...")
        }
        let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: outputSettings)
        let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(value: Float(outputSize.width)), kCVPixelBufferHeightKey as String: NSNumber(value: Float(outputSize.height))]
        let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
        if videoWriter.canAdd(videoWriterInput) {
            videoWriter.add(videoWriterInput)
        }
        if videoWriter.startWriting() {
            // Start the session at time zero so the first frame is not offset.
            videoWriter.startSession(atSourceTime: kCMTimeZero)

            assert(pixelBufferAdaptor.pixelBufferPool != nil)
            let media_queue = DispatchQueue(label: "mediaInputQueue")
            videoWriterInput.requestMediaDataWhenReady(on: media_queue, using: { () -> Void in
                // Each image is held for imagesPerSecond seconds, so frame i
                // is presented at i * imagesPerSecond -- matching the 3-second
                // spacing the animation start times below rely on.
                let fps: Int32 = 1
                let secondsPerImage = Int64(self.imagesPerSecond)
                var frameCount: Int64 = 0
                var appendSucceeded = true
                var newImageArr = self.selectedImageArray
                while !newImageArr.isEmpty {
                    if videoWriterInput.isReadyForMoreMediaData {
                        let nextPhoto = newImageArr.remove(at: 0)
                        let presentationTime = CMTimeMake(frameCount * secondsPerImage, fps)
                        var pixelBuffer: CVPixelBuffer? = nil
                        let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)
                        if let pixelBuffer = pixelBuffer, status == 0 {
                            let managedPixelBuffer = pixelBuffer
                            CVPixelBufferLockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
                            let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
                            let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
                            let context = CGContext(data: data, width: Int(self.outputSize.width), height: Int(self.outputSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(managedPixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)
                            context!.clear(CGRect(x: 0, y: 0, width: CGFloat(self.outputSize.width), height: CGFloat(self.outputSize.height)))
                            let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
                            let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
                            //let aspectRatio = max(horizontalRatio, verticalRatio) // ScaleAspectFill
                            let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit
                            let newSize: CGSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
                            let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
                            let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0
                            context?.draw(nextPhoto.cgImage!, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))
                            CVPixelBufferUnlockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
                            appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                        } else {
                            print("Failed to allocate pixel buffer")
                            appendSucceeded = false
                        }
                    }
                    if !appendSucceeded {
                        break
                    }
                    frameCount += 1
                }
                videoWriterInput.markAsFinished()
                videoWriter.finishWriting { () -> Void in
                    print("-----video1 url = \(self.imageArrayToVideoURL)")
                    self.globalVideoURL = self.imageArrayToVideoURL

                    self.asset = AVAsset.init(url:self.imageArrayToVideoURL as URL)

                    self.exportVideoWithAnimation()
                }
            })
        }
    }
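
To make the wait feel shorter while the frames are being appended, the HUD could also be switched to determinate mode and fed frameCount against the total. A sketch, assuming the MBProgressHUD from setUpInitialView is kept in a hypothetical hud property:

    // Hypothetical helper: report frame-writing progress on the HUD.
    // Assumes self.hud holds the MBProgressHUD created in setUpInitialView.
    func updateWriteProgress(framesWritten: Int64, totalFrames: Int64) {
        DispatchQueue.main.async {
            self.hud?.mode = .determinate
            self.hud?.progress = Float(framesWritten) / Float(totalFrames)
        }
    }

Calling updateWriteProgress(framesWritten: frameCount, totalFrames: Int64(self.selectedImageArray.count)) once per loop iteration keeps the spinner from looking frozen without touching the writer logic.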

3) Adding animations to the video:

func exportVideoWithAnimation() {
        let composition = AVMutableComposition()
        let track =  self.asset?.tracks(withMediaType: AVMediaType.video)
        let videoTrack:AVAssetTrack = track![0] as AVAssetTrack
        let timerange = CMTimeRangeMake(kCMTimeZero, (self.asset?.duration)!)

        let compositionVideoTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: CMPersistentTrackID())!

        do {
            try compositionVideoTrack.insertTimeRange(timerange, of: videoTrack, at: kCMTimeZero)
            compositionVideoTrack.preferredTransform = videoTrack.preferredTransform
        } catch {
            print(error)
        }

        // If your source video always has sound, you can skip this flag check.
        if self.audioIsEnabled {
            let compositionAudioTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: CMPersistentTrackID())!

            for audioTrack in (self.asset?.tracks(withMediaType: AVMediaType.audio))! {
                do {
                    try compositionAudioTrack.insertTimeRange(audioTrack.timeRange, of: audioTrack, at: kCMTimeZero)
                } catch {
                    print(error)
                }
            }
        }

        let size = videoTrack.naturalSize

        let videolayer = CALayer()
        videolayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)

        let parentlayer = CALayer()
        parentlayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
        parentlayer.addSublayer(videolayer)

        // MARK: - This is the animation part
        // Start times for each frame's animation; every frame stays on screen
        // for 3 seconds, which is why consecutive entries differ by 3.
        let time = [0.00001, 3, 6, 9, 12]
        let imgarray = self.selectedImageArray

        for index in 0..<self.selectedImageArray.count {

            let nextPhoto = imgarray[index]

            let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
            let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
            let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit
            let newSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
            let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
            let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0
            let blackLayer = CALayer()

            // MARK: - Animations
            // Only transition #1 (left -> right) is shown here.
            if self.globalSelectedTransitionTag == 0 {
                blackLayer.frame = CGRect(x: -videoTrack.naturalSize.width, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
                blackLayer.backgroundColor = UIColor.black.cgColor

                let imageLayer = CALayer()
                imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
                imageLayer.contents = imgarray[index].cgImage
                blackLayer.addSublayer(imageLayer)

                let animation = CABasicAnimation()
                animation.keyPath = "position.x"
                animation.fromValue = -videoTrack.naturalSize.width
                animation.toValue = 5 * (videoTrack.naturalSize.width)
                animation.duration = 5
                animation.beginTime = CFTimeInterval(time[index])
                animation.fillMode = kCAFillModeForwards
                animation.isRemovedOnCompletion = false
                blackLayer.add(animation, forKey: "position.x")
            }

            parentlayer.addSublayer(blackLayer)
        }
        let layercomposition = AVMutableVideoComposition()
        layercomposition.frameDuration = CMTimeMake(1, 30)
        layercomposition.renderSize = size

        layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)
        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
        let videotrack = composition.tracks(withMediaType: AVMediaType.video)[0] as AVAssetTrack
        let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
        instruction.layerInstructions = [layerinstruction]
        layercomposition.instructions = [instruction]
        if fromTransition {
            self.globalrVideoComposition = layercomposition
        }
        let animatedVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/video2.mp4")
        self.removeFileAtURLIfExists(url: animatedVideoURL)

        guard let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality) else {return}
        assetExport.videoComposition = self.globalrVideoComposition
        assetExport.outputFileType = AVFileType.mp4
        assetExport.outputURL = animatedVideoURL as URL
        print("****** animatedVideoURL *****",animatedVideoURL)
        assetExport.exportAsynchronously(completionHandler: {
            switch assetExport.status{
            case  AVAssetExportSessionStatus.failed:
                print("failed \(String(describing: assetExport.error))")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(String(describing: assetExport.error))")
            default:
                print("Exported")

                if(self.fromPlayVideo){
                    DispatchQueue.main.async {

                        self.globalVideoURL = animatedVideoURL
                        self.playVideoInPlayer(animatedVideoURL: animatedVideoURL as URL)
                    }
                }else if(self.fromSave){

                    PHPhotoLibrary.shared().performChanges({
                        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: animatedVideoURL as URL)
                        print("222222 animatedVideoURL",animatedVideoURL)



                    }) { saved, error in
                        DispatchQueue.main.async {
                            MBProgressHUD.hideAllHUDs(for: self.view, animated: true)
                        }

                        if saved {

                            let alertController = UIAlertController(title: "Your video was successfully saved", message: nil, preferredStyle: .alert)
                            let defaultAction = UIAlertAction(title: "OK", style: .default, handler: nil)
                            alertController.addAction(defaultAction)

                            print("The task is done,enjoy now!")
                            self.present(alertController, animated: true, completion: nil)
                        } else {
                            print("Video save failed: \(String(describing: error))")
                        }
                    }
                }
            }
        })
    }
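
The AVAssetExportSession pass can at least report its own progress, and AVAssetExportPresetMediumQuality exports noticeably faster than AVAssetExportPresetHighestQuality when only a preview is needed. A sketch of polling the session right after exportAsynchronously is called (timer polling is my assumption, since the session exposes no progress callback; hud is the same hypothetical property as above):

    // Sketch: poll the export session's built-in progress (0.0 ... 1.0).
    // Scheduled on the main queue so the timer has a run loop to fire on.
    DispatchQueue.main.async {
        _ = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { timer in
            self.hud?.progress = assetExport.progress
            // Stop polling once the export has finished one way or another.
            if assetExport.status == .completed
                || assetExport.status == .failed
                || assetExport.status == .cancelled {
                timer.invalidate()
            }
        }
    }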

4) Playing the video:

func playVideoInPlayer(animatedVideoURL: URL) {

        if globalFilterName != nil {
            self.asset = AVAsset(url: animatedVideoURL)
            let newPlayerItem = AVPlayerItem(asset: self.asset)
            newPlayerItem.videoComposition = globalrVideoComposition
            self.player = AVPlayer(playerItem: newPlayerItem)
        } else {
            let newPlayerItem = AVPlayerItem(url: animatedVideoURL)
            self.player = AVPlayer(playerItem: newPlayerItem)
        }

        NotificationCenter.default.addObserver(self, selector: #selector(self.finishedPlaying(_:)), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object:nil)
        self.playerLayer = AVPlayerLayer.init(player:self.player)
        let width: CGFloat = self.videoContainerView.frame.size.width
        let height: CGFloat = self.videoContainerView.frame.size.height
        self.playerLayer.frame = CGRect(x: 0.0, y:0, width: width, height: height)
        self.playerLayer.backgroundColor = UIColor.black.cgColor
        self.playerLayer.videoGravity = .resizeAspectFill
        self.videoContainerView.layer.addSublayer( self.playerLayer)

        self.playPauseBtn.isHidden = false

        self.playPauseBtn.setImage(UIImage.init(named:"pause"), for:.normal)

        DispatchQueue.main.async {
            MBProgressHUD.hideAllHUDs(for:self.view, animated:true)
            self.player.play()
        }

    }

The whole flow works fine; the only problem is that it takes a lot of time before the video plays with all the settings applied (converting the images into a video and adding the animations). Please help me reduce this time, so that users don't have to wait so long to see the video play after picking their images from the image picker.

Any help or guidance would be greatly appreciated. Thanks in advance!
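
One direction I have been considering (untested sketch below): skipping the video2.mp4 export before playback entirely. As far as I know, the postProcessingAsVideoLayer animation tool used in step 3 is export-only, but an AVSynchronizedLayer can drive the same CALayer animations against an AVPlayerItem's timeline, so the slow AVAssetExportSession pass would only need to run when the user actually saves. Here animatedOverlayLayers is a hypothetical array holding the blackLayers built in step 3:

    // Sketch: play the AVMutableComposition from step 3 directly and let an
    // AVSynchronizedLayer run the overlay animations on the item's clock.
    let item = AVPlayerItem(asset: composition)     // `composition` from step 3
    let player = AVPlayer(playerItem: item)

    let playerLayer = AVPlayerLayer(player: player)
    playerLayer.frame = videoContainerView.bounds
    videoContainerView.layer.addSublayer(playerLayer)

    // Only the animated overlays go into the synchronized layer; the video
    // frames themselves come from playerLayer underneath.
    let syncLayer = AVSynchronizedLayer(playerItem: item)
    syncLayer.frame = videoContainerView.bounds
    for overlay in animatedOverlayLayers {          // hypothetical: the blackLayers
        syncLayer.addSublayer(overlay)
    }
    videoContainerView.layer.addSublayer(syncLayer)
    player.play()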

Tags: ios, multithreading, avfoundation, swift4
