HTML5   发布时间:2022-04-27  发布网站:大佬教程  code.js-code.com
大佬教程收集整理的这篇文章主要介绍了ios – 使用AVMutableComposition合并视频时视频破解大佬教程大佬觉得挺不错的,现在分享给大家,也给大家做个参考。
我正在使用AVMutableComposition将视频与下面的代码合并,

- (void)MergeAndSave_internal{

    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1,30);
    videoComposition.renderScale = 1.0;

    AVMutableVideoCompositioninstruction *instruction = [AVMutableVideoCompositioninstruction videoCompositioninstruction];

    AVMutableVideoCompositionLayerinstruction *layerinstruction = [AVMutableVideoCompositionLayerinstruction videoCompositionLayerinstructionWithAssetTrack:compositionVideoTrack];

    NSLog(@"%@",videoPathArray);

    float time = 0;
    CMTime startTime = kCMTimeZero;

    for (int i = 0; i<videoPathArray.count; i++) {

        AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[videoPathArray objectATindex:i]] options:[NSDictionary DictionaryWithObject:[NSnumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey]];

        NSError *error = nil;

        BOOL ok = NO;
        AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectATindex:0];
        AVAssetTrack *sourceAudioTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeAudio] objectATindex:0];

        CGSize temp = CGSizeApplyAffineTransform(sourceVideoTrack.naturalSize,sourceVideoTrack.preferredTransform);
        CGSize size = CGSizeMake(fabsf(temp.width),fabsf(temp.height));
        CGAffineTransform transform = sourceVideoTrack.preferredTransform;

        videoComposition.renderSize = sourceVideoTrack.naturalSize;
        if (size.width > size.height) {

            [layerinstruction setTransform:transform atTime:CMTimeMakeWithSeconds(time,30)];
        } else {


            float s = size.width/size.height;


            CGAffineTransform newe = CGAffineTransformConcat(transform,CGAffineTransformMakeScale(s,s));

            float x = (size.height - size.width*s)/2;

            CGAffineTransform newer = CGAffineTransformConcat(newe,CGAffineTransformMakeTranslation(x,0));

            [layerinstruction setTransform:newer atTime:CMTimeMakeWithSeconds(time,30)];
        }
        if(i==0){
             [compositionVideoTrack setPreferredTransform:sourceVideoTrack.preferredTransform];
        }
        ok = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,[sourceAsset duration]) ofTrack:sourceVideoTrack atTime:startTime error:&error];


        ok = [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,[sourceAsset duration]) ofTrack:sourceAudioTrack atTime:startTime error:nil];

        if (!ok) {
            {
                [radialView4 setHidden:YES];
                NSLog(@"Export Failed: %@",[[self.exportSession error] localizedDescription]);
                UIAlertView *alert = [[UIAlertView alloc] initWithtitle:@"Error" message:@"Something Went Wrong :("  delegate:nil cancelButtontitle:@"Ok" otherButtontitles: nil,nil];
                [alert show];
                [radialView4 setHidden:YES];
                break;
            }

        }

        startTime = CMTimeAdd(startTime,[sourceAsset duration]);

    }


    instruction.layerinstructions = [NSArray arrayWithObject:layerinstruction];
    instruction.timeRange = compositionVideoTrack.timeRange;

    videoComposition.instructions = [NSArray arrayWithObject:instruction];

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask,YES);
    NSString *documentsDirectory = [paths objectATindex:0];
    NSString *myPathDocs =  [documentsDirectory StringByAppendingPathComponent:
                             [NSString StringWithFormat:@"RampMergedVideo.mov"]];
    unlink([myPathDocs UTF8String]);
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                      presetName:AVAssetExportPreset1280x720];
    exporter.outputURL=url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldoptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(),^{

            switch ([exporter status]) {
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"Export Failed: %@",[exporter error]);
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"Export canceled");
                    break;
                case AVAssetExportSessionStatusCompleted:{
                    NSLog(@"Export successfully");

                }
                default:
                    break;
            }
            if (exporter.status != AVAssetExportSessionStatusCompleted){
                NSLog(@"Retry export");

            }

        });
    }];

}

但视频在保存到系统并在快速播放器中播放时看起来很破旧.我认为CFAffline中的问题转化了.有人可以请教吗?

这是视频中间的破解屏幕:

解决方法

您尚未将videoComposition设置为AVAssetExportSession.尝试做这个exporter.videoComposition = videoComposition;.然尝试了这个但是应该工作.

大佬总结

以上是大佬教程为你收集整理的ios – 使用AVMutableComposition合并视频时视频破解全部内容,希望文章能够帮你解决ios – 使用AVMutableComposition合并视频时视频破解所遇到的程序开发问题。

如果觉得大佬教程网站内容还不错,欢迎将大佬教程推荐给程序员好友。

本图文内容来源于网友网络收集整理提供,作为学习参考使用,版权属于原作者。
如您有任何意见或建议可联系处理。小编QQ:384754419,请注明来意。