ios - 为什么 AVAssetWriter 创建的文件大小为 0?
问题描述
我正在尝试录制由自定义 CIFilters 过滤的视频。并成功创建文件和预览。但导出的文件大小为零,无法打开。但是调试器中没有发生错误,也没有崩溃。为什么会这样??
我的代码在这里。
import UIKit
import AVFoundation
import RxCocoa
import RxSwift
class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
private let disposeBag = DisposeBag()
let camPreview = UIView()
let imageView = UIImageView()
var camera: AVCaptureDevice!
var videoInput: AVCaptureDeviceInput!
let captureSession = AVCaptureSession()
private lazy var videoOutput: AVCaptureVideoDataOutput! = {
let videoOutput = AVCaptureVideoDataOutput()
videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
videoOutput.alwaysDiscardsLateVideoFrames = true
return videoOutput
}()
var assetWriter: AVAssetWriter!
var videoAssetInput: AVAssetWriterInput!
var pixelBuffer: AVAssetWriterInputPixelBufferAdaptor!
var startTime: CMTime!
var endTime: CMTime!
var frameNumber: Int64 = 0
override func viewDidLoad() {
super.viewDidLoad()
self.view.backgroundColor = UIColor.white
imageView.backgroundColor = UIColor.groupTableViewBackground
imageView.contentMode = .scaleAspectFill
imageView.layer.masksToBounds = true
self.view.addSubview(imageView)
imageView.snp.makeConstraints { make in
make.edges.equalToSuperview()
}
self.view.layoutIfNeeded()
if setupSession() {
startSession()
}
let startButton = UIButton()
startButton.setTitle("startButton", for: .normal)
startButton.rx.controlEvent(.touchUpInside)
.asDriver()
.drive(onNext: { self.startRecording() })
.disposed(by: disposeBag)
self.view.addSubview(startButton)
startButton.snp.makeConstraints { make in
make.center.equalTo(self.view.snp.center)
}
let stopButton = UIButton()
stopButton.setTitle("stopButton", for: .normal)
stopButton.rx.controlEvent(.touchUpInside)
.asDriver()
.drive(onNext: { self.stopRecording() })
.disposed(by: disposeBag)
self.view.addSubview(stopButton)
stopButton.snp.makeConstraints { make in
make.centerX.equalTo(self.view.snp.centerX)
make.top.equalTo(startButton.snp.bottom).offset(30.0)
}
}
private func startRecording() {
let documentPath = NSHomeDirectory() + "/Documents/"
let filePath = documentPath + "video.mp4"
let fileURL = URL(fileURLWithPath: filePath)
let videoSettings = [
AVVideoWidthKey: 480,
AVVideoHeightKey: 640,
AVVideoCodecKey: AVVideoCodecType.h264
] as [String: Any]
videoAssetInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
pixelBuffer = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoAssetInput, sourcePixelBufferAttributes: [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)])
frameNumber = 0
do {
try assetWriter = AVAssetWriter(outputURL: fileURL, fileType: .mp4)
videoAssetInput.expectsMediaDataInRealTime = true
assetWriter.add(videoAssetInput)
assetWriter.startWriting()
assetWriter.startSession(atSourceTime: CMTime.zero)
print(#function)
} catch {
print(error)
}
}
private func stopRecording() {
if videoAssetInput == nil { return }
videoAssetInput.markAsFinished()
assetWriter.endSession(atSourceTime: endTime)
assetWriter.finishWriting {
self.videoAssetInput = nil
}
}
func setupSession() -> Bool {
captureSession.sessionPreset = AVCaptureSession.Preset.vga640x480
let camera = AVCaptureDevice.default(for: .video)
do {
let input = try AVCaptureDeviceInput(device: camera!)
if captureSession.canAddInput(input) {
captureSession.addInput(input)
}
} catch {
print(error)
return false
}
if captureSession.canAddOutput(videoOutput) {
captureSession.addOutput(videoOutput)
}
return true
}
func startSession() {
if !captureSession.isRunning {
DispatchQueue.main.async {
self.captureSession.startRunning()
print(#function)
}
}
}
func stopSession() {
if captureSession.isRunning {
DispatchQueue.main.async {
self.captureSession.stopRunning()
}
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
autoreleasepool {
connection.videoOrientation = AVCaptureVideoOrientation.portrait
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
return
}
let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)
let context = CIContext(options: nil)
let imageRef = context.createCGImage(cameraImage, from: cameraImage.extent)
let image = UIImage(cgImage: imageRef!)
DispatchQueue.main.async {
self.imageView.filter(_image: image)
guard
let videoAssetInput = self.videoAssetInput,
let displayedImage = self.imageView.image
else {
return
}
if !CMSampleBufferDataIsReady(sampleBuffer) {
return
}
if self.frameNumber == 0 {
self.startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
}
let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
let frameTime = CMTimeSubtract(timestamp, self.startTime)
if videoAssetInput.isReadyForMoreMediaData {
if let pxBuffer: CVPixelBuffer = self.buffer(from: displayedImage) {
self.pixelBuffer.append(pxBuffer, withPresentationTime: frameTime)
}
self.frameNumber += 1
}
self.endTime = frameTime
}
}
}
func buffer(from image: UIImage) -> CVPixelBuffer? {
let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] as CFDictionary
var pixelBuffer : CVPixelBuffer?
let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(image.size.width), Int(image.size.height), kCVPixelFormatType_32ARGB, attrs, &pixelBuffer)
guard (status == kCVReturnSuccess) else {
return nil
}
CVPixelBufferLockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))
let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer!)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let context = CGContext(data: pixelData, width: Int(image.size.width), height: Int(image.size.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
context?.translateBy(x: 0, y: image.size.height)
context?.scaleBy(x: 1.0, y: -1.0)
UIGraphicsPushContext(context!)
image.draw(in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
UIGraphicsPopContext()
CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))
return pixelBuffer
}
}
抱歉,我的代码仍然一团糟,可能会崩溃。但这不是目前的重点...
解决方案
问题已经解决了。似乎已经在 Document 目录中的视频妨碍了。当我删除它时,我能够安全地导出视频。
推荐阅读
- c# - 如何使用 appsetting.json 上的配置为 ASP.Net Core 中的每个类任务创建单独的日志文件
- kql - Kusto 查询 - 显示最近的行
- python - 如何迭代包含Django中另一个字典的字典?
- sql - 如何在 SQL 中查找带有 unicode 字符的损坏记录并删除该记录
- javascript - Blazor WebAssembly 在不使用观察者的情况下调用 JS 函数出错
- html - 在Angular9及更高版本的列表顶部推送已选中/选中的复选框
- ckeditor - 无法修改表格中的表单元素
- maven - 依赖项“com.sun.xml.internal.ws.transport.http.HttpAdapter.dump_threshold”在我的 Eclipse 中没有解决?
- c# - 由于字段验证器,asp.net 无法按“返回”按钮
- woocommerce - OpenAS2 诏令命令 Woo 命令如何?