ios - AVMutableVideoComposition & AVVideoCompositionCoreAnimationTool 颜色故障
问题描述
我正在尝试使用以下代码将图像添加到视频中:
// Builds an AVMutableComposition from the recorded asset, overlays the user's
// draggable images as fading CALayers via AVVideoCompositionCoreAnimationTool,
// and exports the result asynchronously.
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:[self assetURL] options:nil];
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

// FIX: capture insertion errors instead of passing error:nil — a silent
// failure here yields a black/empty export that is very hard to diagnose.
NSError *insertError = nil;

AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                    ofTrack:videoAssetTrack
                     atTime:kCMTimeZero error:&insertError];
if (insertError) {
    NSLog(@"Failed to insert video track: %@", insertError);
}

// FIX: guard against assets with no audio track — objectAtIndex:0 on an empty
// array (and inserting a nil track) would raise an exception.
AVAssetTrack *audioAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
if (audioAssetTrack) {
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                        ofTrack:audioAssetTrack
                         atTime:kCMTimeZero error:&insertError];
    if (insertError) {
        NSLog(@"Failed to insert audio track: %@", insertError);
    }
}

AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);

AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
mainInstruction.layerInstructions = @[videolayerInstruction];

AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];

// FIX: a 90°/270° preferredTransform makes CGSizeApplyAffineTransform return
// negative components, and a renderSize with odd (or non-16-aligned)
// dimensions is a known cause of colored/green edge artifacts on some
// hardware encoders — exactly the device-dependent right-edge glitch
// described. Take the absolute value and round each dimension down to an
// even integer. (If the glitch persists, try aligning to 16 instead —
// TODO confirm on the affected iPad.)
CGSize transformedSize = CGSizeApplyAffineTransform(videoTrack.naturalSize, videoTrack.preferredTransform);
CGSize naturalSize = CGSizeMake(floor(fabs(transformedSize.width) / 2.0) * 2.0,
                                floor(fabs(transformedSize.height) / 2.0) * 2.0);
mainCompositionInst.renderSize = naturalSize;
mainCompositionInst.instructions = @[mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);

// Scale factor from on-screen preview coordinates to render coordinates.
CGFloat ratio = naturalSize.width / rectVideo.size.width;

CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
videoLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[parentLayer addSublayer:videoLayer];

for (DraggableImageView *iv in imageViews) {
    CALayer *overlayLayer = [CALayer layer];
    overlayLayer.contents = (id)[[self normalizedImage:iv.image] CGImage];
    // Flip y: UIKit is top-left origin, Core Animation (here) bottom-left.
    overlayLayer.frame = CGRectMake(iv.frame.origin.x * ratio,
                                    (rectVideo.size.height - iv.frame.origin.y - iv.frame.size.height) * ratio,
                                    iv.frame.size.width * ratio,
                                    iv.frame.size.height * ratio);
    overlayLayer.masksToBounds = YES;
    // Base opacity is 0; the keyframe animation makes the layer visible only
    // during [startTime, endTime].
    overlayLayer.opacity = 0;

    CGFloat duration = CMTimeGetSeconds(iv.endTime) - CMTimeGetSeconds(iv.startTime);
    CGFloat start = CMTimeGetSeconds(iv.startTime);
    if (duration > CMTimeGetSeconds(videoAsset.duration)) {
        duration = CMTimeGetSeconds(videoAsset.duration);
        start = 0;
    }

    // FIX: guard the division (duration could be 0) and clamp the fade
    // fraction to 0.5 — CAKeyframeAnimation requires monotonically
    // non-decreasing keyTimes, and fade > 0.5 would make the third key time
    // (1 - fade) smaller than the second (fade), invalidating the animation.
    CGFloat fadeInSeconds = iv.fadeInSeconds;
    CGFloat fade = (duration > 0) ? (fadeInSeconds / duration) : 0;
    fade = MIN(MAX(fade, 0.0), 0.5);

    CAKeyframeAnimation *anim = [CAKeyframeAnimation animationWithKeyPath:@"opacity"];
    anim.values = @[@0, @1, @1, @0];
    anim.keyTimes = @[@0, @(fade), @(1.0 - fade), @1];
    anim.duration = duration;
    // AVCoreAnimationBeginTimeAtZero stands in for t=0 (a literal 0 beginTime
    // means "now" to Core Animation).
    anim.beginTime = AVCoreAnimationBeginTimeAtZero + start;
    // FIX: animations used for export must survive past their end, otherwise
    // the renderer may drop them when sampling frames after `duration`.
    anim.removedOnCompletion = NO;
    anim.fillMode = kCAFillModeForwards;
    [overlayLayer addAnimation:anim forKey:@"anim"];
    [parentLayer addSublayer:overlayLayer];
}

mainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool
    videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

NSURL *exportURL = [self exportUrl];
exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL = exportURL;
exportSession.videoComposition = mainCompositionInst;
exportSession.outputFileType = _recordSession.fileType;
exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);

// NOTE(review): this timer should be invalidated when the export finishes or
// is cancelled — presumably done in savingComplete:; verify.
exportProgressTimer = [NSTimer scheduledTimerWithTimeInterval:.1 target:self selector:@selector(exportDidProgress:) userInfo:nil repeats:YES];

[exportSession exportAsynchronouslyWithCompletionHandler:^{
    switch (exportSession.status) {
        // On success exportSession.error is nil, so both outcomes funnel
        // through the same completion path.
        case AVAssetExportSessionStatusCompleted:
        case AVAssetExportSessionStatusFailed:
            [self savingComplete:exportSession.error];
            break;
        case AVAssetExportSessionStatusCancelled:
            break;
        default:
            break;
    }
}];
此代码在 iPhone SE 上运行良好,但在 iPad Air 2 上会在视频的右边缘出现这种颜色故障(放大的屏幕截图):
此故障不是静态的：它会在垂直方向上变化（大小和 y 位置都会变）。我认为这不是视频尺寸的问题——众所周知，当视频宽高不能被 4 或 16 整除时，边缘会出现绿线——但此视频的尺寸是 1080x1920。
有任何想法吗?
解决方案
推荐阅读
- java - springboot如何从自定义yml加载属性
- hyperledger - 超级账本资源管理器
- asynchronous - pyspark 中的 collectAsync 等价物
- reactjs - 使用 WebPack / Babel 删除生产 React JS 中的 console.log
- python-3.x - python-无法为谷歌云存储对象创建签名网址
- java - Java 类自身正常编译,但通过 JSP 运行时抛出 NoClassFoundException 或 NoClassDefFoundError
- python-3.x - 无法从 Amazon Connect 调用 Python Lambda 函数
- android - glTexImage2D: 得到 err pre :( 0x506 内部 0x1908 格式 0x1908 类型 0x1401
- excel-formula - col2 中每组的交替单元格阴影
- regex - 如何获取有关单元格的所有信息,基于 cell_list.txt