iOS: Using AVFoundation to merge consecutive video chunks into one final video

Problem description

I'm working on a project that requires recording video segments and then merging those segments into a single video. I'm using the AVFoundation framework to do this.

The problem is that when I merge the chunks, there are always black frames or missing sound between them. On top of that, the audio is sometimes out of sync after merging.
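
In case it helps narrow things down, my understanding is that gaps like these usually come from each chunk's audio and video tracks having slightly different durations, so laying the tracks back to back leaves holes that accumulate as drift. A minimal sketch for inspecting the per-chunk durations (the chunkURLs array here is just a placeholder for the recorded files):

import AVFoundation

// Hypothetical list of the recorded chunk files.
let chunkURLs: [URL] = []

for url in chunkURLs {
    // Ask for precise timing so track durations are exact rather than estimated.
    let asset = AVURLAsset(url: url, options: [AVURLAssetPreferPreciseDurationAndTimingKey: true])
    let video = asset.tracks(withMediaType: .video).first?.timeRange.duration ?? kCMTimeZero
    let audio = asset.tracks(withMediaType: .audio).first?.timeRange.duration ?? kCMTimeZero

    // Any difference here becomes a visible or audible gap once the chunks
    // are laid back to back in a composition.
    print("\(url.lastPathComponent): video \(CMTimeGetSeconds(video))s, audio \(CMTimeGetSeconds(audio))s")
}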

I've tried many ways to fix this but haven't found a good solution. I've tried many answers from Stack Overflow, but none of them worked.

I also tried MKOVideoMerge, but I still have the same problem.

Below is a small view controller I put together that records from the camera and starts a new chunk every 10 seconds. When the user taps "stop", all the chunks are merged and saved to the camera roll:

If anyone has managed to merge two video chunks without losing frames or sound, any help would be greatly appreciated :)

import UIKit
import AVFoundation
import Photos
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
private enum SessionSetupResult {
    case success
    case notAuthorized
    case configurationFailed
}

@IBOutlet weak var btnStartStop: UIButton!
@IBOutlet weak var previewView: PreviewView!

private let session = AVCaptureSession()
private let sessionQueue = DispatchQueue(label: "session queue")
private let mergeQueue = DispatchQueue(label: "merge queue")
private var setupResult: SessionSetupResult = .success

private var videoDeviceInput: AVCaptureDeviceInput!
lazy private var movieBufferOutput = AVCaptureVideoDataOutput()
lazy private var audioBufferOutput = AVCaptureAudioDataOutput()
private var movieConnection: AVCaptureConnection!
private var audioConnection: AVCaptureConnection!

private var assetWriter: AVAssetWriter! = nil
private var assetWriterInput: AVAssetWriterInput! = nil
private var audioWriterInput: AVAssetWriterInput! = nil

private var chunkNumber = 0
private let chunkMaxDuration = 10.0          // seconds per chunk
private var chunkStartTime: CMTime! = nil    // presentation timestamp of the current chunk's first frame
private var chunkOutputURL: URL! = nil       // file URL of the chunk currently being written

private var stopRecording: Bool = false

@IBAction func startStop(_ sender: Any) {
    // Ask the capture callback to finish the current chunk and run the merge.
    stopRecording = true
    btnStartStop.setTitle("recording", for: .normal)
}

override func viewDidLoad() {
    super.viewDidLoad()

    previewView.session = session
    previewView.videoPreviewLayer.videoGravity = .resizeAspectFill

    switch AVCaptureDevice.authorizationStatus(for: .video) {
    case .authorized:
        break
    case .notDetermined:
        sessionQueue.suspend()
        AVCaptureDevice.requestAccess(for: .video, completionHandler: { granted in
            if !granted {
                self.setupResult = .notAuthorized
            }
            self.sessionQueue.resume()
        })

    default:
        setupResult = .notAuthorized
    }

    sessionQueue.async {
        self.configureSession()
    }
}

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)

    self.cleanTempDirectory()

    sessionQueue.async {
        switch self.setupResult {
        case .success:
            break
        case .notAuthorized:
            DispatchQueue.main.async {
                let changePrivacySetting = "Not authorized"
                let message = NSLocalizedString(changePrivacySetting, comment: "Alert message when the user has denied access to the camera")
                let alertController = UIAlertController(title: "Not authorized", message: message, preferredStyle: .alert)

                alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
                                                        style: .cancel,
                                                        handler: nil))

                alertController.addAction(UIAlertAction(title: NSLocalizedString("Settings", comment: "Alert button to open Settings"),
                                                        style: .`default`,
                                                        handler: { _ in
                                                            UIApplication.shared.open(URL(string: UIApplicationOpenSettingsURLString)!, options: [:], completionHandler: nil)
                }))

                self.present(alertController, animated: true, completion: nil)
            }

        case .configurationFailed:
            DispatchQueue.main.async {
                let alertMsg = "Error"
                let message = NSLocalizedString("Error", comment: alertMsg)
                let alertController = UIAlertController(title: "Error", message: message, preferredStyle: .alert)

                alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
                                                        style: .cancel,
                                                        handler: nil))

                self.present(alertController, animated: true, completion: nil)
            }
        }
    }
}

private func configureSession() {
    if setupResult != .success {
        return
    }

    session.beginConfiguration()
    session.sessionPreset = .high

    do {
        var defaultVideoDevice: AVCaptureDevice?

        if let dualCameraDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back) {
            defaultVideoDevice = dualCameraDevice
        } else if let backCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) {
            defaultVideoDevice = backCameraDevice
        } else if let frontCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) {
            defaultVideoDevice = frontCameraDevice
        }

        guard let videoDevice = defaultVideoDevice else {
            setupResult = .configurationFailed
            session.commitConfiguration()
            return
        }

        let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)

        if session.canAddInput(videoDeviceInput) {
            session.addInput(videoDeviceInput)
            self.videoDeviceInput = videoDeviceInput

            DispatchQueue.main.async {
                self.previewView.videoPreviewLayer.connection?.videoOrientation = .landscapeRight
            }
        } else {
            setupResult = .configurationFailed
            session.commitConfiguration()
            return
        }
    } catch {
        setupResult = .configurationFailed
        session.commitConfiguration()
        return
    }

    do {
        if let audioDevice = AVCaptureDevice.default(for: .audio) {
            let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)

            if session.canAddInput(audioDeviceInput) {
                session.addInput(audioDeviceInput)
            }
        }
    } catch {
        // Audio input is optional here; if it fails, the session simply runs without sound.
    }

    movieBufferOutput.videoSettings = [
        String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    ]

    if self.session.canAddOutput(movieBufferOutput) {
        self.session.addOutput(movieBufferOutput)

        if let connection = self.movieBufferOutput.connection(with: .video) {
            movieConnection = connection
            connection.videoOrientation = .landscapeRight

            if connection.isVideoStabilizationSupported {
                connection.preferredVideoStabilizationMode = .auto
            }
        }
    } else {
        setupResult = .configurationFailed
        session.commitConfiguration()
        return
    }

    if self.session.canAddOutput(audioBufferOutput) {
        self.session.addOutput(audioBufferOutput)

        if let connection = self.audioBufferOutput.connection(with: .audio) {
            audioConnection = connection
        }

    } else {
        print("Could not add audio output to the session")
        setupResult = .configurationFailed
        session.commitConfiguration()
        return
    }

    let queue: DispatchQueue = DispatchQueue(label: "MediaOutputQueue")
    let audioQueue: DispatchQueue = DispatchQueue(label: "AudioOutputQueue")
    self.movieBufferOutput.setSampleBufferDelegate(self, queue: queue)
    self.audioBufferOutput.setSampleBufferDelegate(self, queue: audioQueue)
    self.movieBufferOutput.alwaysDiscardsLateVideoFrames = true

    session.commitConfiguration()

    self.session.startRunning()
}

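// Starts a new chunk: builds a fresh AVAssetWriter that encodes H.264 video
// and AAC audio into chunk<N>.mp4 in the temporary directory, then opens its
// session at the given presentation timestamp so samples keep their capture timing.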
func createWriterInput(for presentationTimeStamp: CMTime) {
    self.stopRecording = false

    let fileManager = FileManager.default
    let outputFileName = "chunk\(chunkNumber)"
    let outputFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent((outputFileName as NSString).appendingPathExtension("mp4")!)

    chunkOutputURL = URL(fileURLWithPath: outputFilePath)
    try? fileManager.removeItem(at: chunkOutputURL)

    assetWriter = try! AVAssetWriter(outputURL: chunkOutputURL, fileType: .mp4)

    let outputSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: 1280, AVVideoHeightKey: 720]
    assetWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: outputSettings)
    assetWriterInput.expectsMediaDataInRealTime = true
    assetWriter.add(assetWriterInput)

    let audioSettings = [
        AVFormatIDKey : kAudioFormatMPEG4AAC,
        AVNumberOfChannelsKey : 1,
        AVSampleRateKey : 44100.0,
        AVEncoderBitRateKey: 192000
        ] as [String : Any]

    audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
    audioWriterInput.expectsMediaDataInRealTime = true
    assetWriter.add(audioWriterInput)

    chunkNumber += 1
    chunkStartTime = presentationTimeStamp

    assetWriter.startWriting()
    assetWriter.startSession(atSourceTime: chunkStartTime)
}

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}

override var shouldAutorotate: Bool {
    return false
}

override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
    return .landscapeRight
}

override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
    return .landscapeRight
}

func cleanTempDirectory() {
    let tempVideosPath = NSTemporaryDirectory()
    var isDirectory = ObjCBool(true)

    if !FileManager.default.fileExists(atPath: tempVideosPath, isDirectory: &isDirectory) {
        return
    }

    let tempVideosURL = URL(fileURLWithPath: tempVideosPath)

    do {
        let directoryContents = try FileManager.default.contentsOfDirectory(at: tempVideosURL, includingPropertiesForKeys: [.creationDateKey], options: [.skipsHiddenFiles, .skipsSubdirectoryDescendants])

        let mp4Files = directoryContents.filter { $0.pathExtension == "mp4" }.map { url in
            (url, (try? url.resourceValues(forKeys: [.creationDateKey]))?.creationDate ?? Date.distantPast)
            }.sorted(by: { $0.1 < $1.1 })

        for mp4 in mp4Files {
            try? FileManager.default.removeItem(at: mp4.0)
        }

    } catch {

    }
}

func getTempVideos() -> [URL] {
    let tempVideosPath = NSTemporaryDirectory()
    var isDirectory = ObjCBool(true)

    if !FileManager.default.fileExists(atPath: tempVideosPath, isDirectory: &isDirectory) {
        return []
    }

    var videosURL: [URL] = []

    let videosUrl = URL(fileURLWithPath: tempVideosPath)

    do {
        let directoryContents = try FileManager.default.contentsOfDirectory(at: videosUrl, includingPropertiesForKeys: [.creationDateKey], options: [.skipsHiddenFiles, .skipsSubdirectoryDescendants])

        let mp4Files = directoryContents.filter { $0.pathExtension == "mp4" }.map { url in
            (url, (try? url.resourceValues(forKeys: [.creationDateKey]))?.creationDate ?? Date.distantPast)
            }.sorted(by: { $0.1 > $1.1 })

        // Keep at most the six most recent chunks.
        for mp4 in mp4Files.prefix(6) {
            videosURL.append(mp4.0)
        }
    } catch {
        return []
    }

    return videosURL
}

func getRecordedVideoURL() -> URL {
    var tempVideosPath = NSTemporaryDirectory()

    tempVideosPath = (tempVideosPath as NSString).appendingPathComponent("videos")

    var isDirectory = ObjCBool(true)

    if !FileManager.default.fileExists(atPath: tempVideosPath, isDirectory: &isDirectory) {
        do {
            try FileManager.default.createDirectory(at: URL(fileURLWithPath: tempVideosPath), withIntermediateDirectories: true, attributes: nil)
        } catch {

        }
    }

    let outputFileName = "test-" + NSUUID().uuidString
    let outputFileURL = URL(fileURLWithPath: tempVideosPath).appendingPathComponent(outputFileName).appendingPathExtension("mp4")

    try? FileManager.default.removeItem(at: outputFileURL)

    return outputFileURL
}

func mergeVideos(urls: [URL], excludedUrl: URL, completion: @escaping (_ exporter: AVAssetExportSession?) -> Void) {
    let mainComposition = AVMutableComposition()
    let compositionVideoTrack = mainComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
    let soundtrackTrack = mainComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
    let assetOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: true]

    var insertTime = kCMTimeZero
    var audioInsertTime = kCMTimeZero
    var videos: [(asset: AVURLAsset, videoTrack: AVAssetTrack, videoDuration: CMTime)] = []

    for url in urls {
        if url.path != excludedUrl.path {
            let videoAsset = AVURLAsset(url: url, options : assetOptions)

            if videoAsset.tracks(withMediaType: .video).count > 0 && videoAsset.tracks(withMediaType: .audio).count > 0 {
                let videoTrack = videoAsset.tracks(withMediaType: .video)[0]
                let videoDuration = videoTrack.timeRange.duration

                videos.append((asset: videoAsset, videoTrack: videoTrack, videoDuration: videoDuration))
            } else {
                // Stop at the first chunk missing a video or audio track; since
                // urls are sorted newest-first, this also drops every older chunk.
                break
            }
        }
    }


    var hasError: Bool = false
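    // Append each chunk's video and audio onto two independently advancing
    // timelines (insertTime for video, audioInsertTime for audio). If a chunk's
    // audio and video durations differ, the two timelines drift apart, which is
    // one plausible source of the gaps and desync described above.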
    for video in videos.reversed() {
        let audioTrack = video.asset.tracks(withMediaType: .audio)[0]
        let audioDuration = audioTrack.timeRange.duration

        do {
            try compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, video.videoDuration), of: video.videoTrack, at: insertTime)
        } catch let error {
            hasError = true
            print(error)
        }

        do {
            try soundtrackTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioDuration), of: audioTrack, at: audioInsertTime)
        } catch let error {
            hasError = true
            print(error)
        }

        insertTime = CMTimeAdd(insertTime, video.videoDuration)
        audioInsertTime = CMTimeAdd(audioInsertTime, audioDuration)
    }

    if videos.isEmpty {
        hasError = true
    }

    if !hasError {
        let outputFileURL = getRecordedVideoURL()
        let exporter = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
        exporter?.outputURL = outputFileURL
        exporter?.outputFileType = AVFileType.mp4
        exporter?.shouldOptimizeForNetworkUse = true

        exporter?.exportAsynchronously {
            DispatchQueue.main.async {
                completion(exporter!)
            }
        }
    } else {
        completion(nil)
    }
}

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    // Route each sample buffer to the matching writer input of the current chunk.
    if connection == self.audioConnection {
        if let audioInput = self.audioWriterInput, audioInput.isReadyForMoreMediaData {
            if !audioInput.append(sampleBuffer) {
                print("Error writing audio buffer")
            }
        }
    } else {
        if let videoInput = self.assetWriterInput, videoInput.isReadyForMoreMediaData {
            if !videoInput.append(sampleBuffer) {
                print("Error writing video buffer")
            }
        }
    }

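    // Chunk rotation: when the current chunk reaches chunkMaxDuration (or the
    // user tapped stop), grab references to the current writer and inputs,
    // swap in a fresh writer for the next chunk, and only then finish the old
    // one. Note that the frame triggering the rotation has already been appended
    // to the old writer above, while the new session starts at that same timestamp.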
    if connection == movieConnection {
        let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        if assetWriter == nil {
            createWriterInput(for: presentationTimeStamp)
        } else {
            let currentChunkDuration = CMTimeGetSeconds(CMTimeSubtract(presentationTimeStamp, chunkStartTime))

            if currentChunkDuration >= chunkMaxDuration || self.stopRecording == true {
                let chunkAssetWriter = assetWriter!
                let assetWriterInput = self.assetWriterInput
                let audioWriterInput = self.audioWriterInput
                let stopRecording = self.stopRecording

                createWriterInput(for: presentationTimeStamp)

                assetWriterInput?.markAsFinished()
                audioWriterInput?.markAsFinished()

                chunkAssetWriter.endSession(atSourceTime: presentationTimeStamp)

                chunkAssetWriter.finishWriting {
                    DispatchQueue.main.async {
                        self.btnStartStop.setTitle("stop", for: .normal)
                    }
                    if stopRecording {
                        self.mergeQueue.async {
                            self.mergeVideos(urls: self.getTempVideos(), excludedUrl: self.chunkOutputURL!, completion: { exportSession in
                                if let exportSession = exportSession {
                                    PHPhotoLibrary.shared().performChanges({
                                        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exportSession.outputURL!)
                                    }) { saved, error in
                                        DispatchQueue.main.async {
                                            if saved {
                                                let alertController = UIAlertController(title: "Your video was successfully saved", message: nil, preferredStyle: .alert)
                                                let defaultAction = UIAlertAction(title: "OK", style: .default, handler: nil)
                                                alertController.addAction(defaultAction)
                                                self.present(alertController, animated: true, completion: nil)
                                            }
                                        }
                                    }
                                }
                            })
                        }
                    }
                }
            }
        }
    }
}
}
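
For illustration, here is a sketch of a single-timeline variant of the insertion loop in mergeVideos, which clamps each chunk's audio to its video duration so both tracks advance together. It assumes the same videos, compositionVideoTrack and soundtrackTrack as above, and it is untested — just to show the alignment idea, not a confirmed fix:

var insertTime = kCMTimeZero

for video in videos.reversed() {
    let audioTrack = video.asset.tracks(withMediaType: .audio)[0]
    // Use at most videoDuration of audio so the two tracks stay in lockstep.
    let audioDuration = CMTimeMinimum(audioTrack.timeRange.duration, video.videoDuration)

    try? compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, video.videoDuration), of: video.videoTrack, at: insertTime)
    try? soundtrackTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioDuration), of: audioTrack, at: insertTime)

    // Advance a single shared cursor by the video duration only.
    insertTime = CMTimeAdd(insertTime, video.videoDuration)
}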

Thanks for your help :)

Tags: ios, swift, avfoundation
