iOS – Why does video recorded with AVCaptureSession and AVAssetWriter and replayed with AVPlayer stutter?
I'm trying to create an app that shows a camera preview, then, under certain conditions, starts recording it with audio input, and finally replays the recorded movie.

I've written classes that handle the preview, recording and replay, plus a controller that coordinates them.

Each of these features seems to work fine when used on its own, but I can't get them to work together: when the recorded video is played back, the sound is there, but the image takes about five seconds to appear and then stutters.

Here is my code.

The preview:

- (void) createSession
{
    _session = [[AVCaptureSession alloc] init];

    // Prefer the front camera; fall back to the default video device.
    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:FRONT_CAMERA_ID];
    if (!device) device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    NSError *error = nil;
    _cVideoInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain];
    if (!error) [_session addInput:_cVideoInput];

    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];

    error = nil;
    _cAudioInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain];
    if (!error) [_session addInput:_cAudioInput];

    _cameraLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
    _cameraLayer.frame = self.bounds;
    [self.layer addSublayer:_cameraLayer];

    _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [_session setSessionPreset:AVCaptureSessionPreset640x480];
    [_videoOutput setVideoSettings:[NSDictionary dictionaryWithContentsOfFile:VIDEO_SETTINGS]];

    _audioOutput = [[AVCaptureAudioDataOutput alloc] init];

    // Both outputs deliver their sample buffers on the same serial queue.
    dispatch_queue_t queue = dispatch_queue_create(OUTPUT_QUEUE_NAME, NULL);
    [_videoOutput setSampleBufferDelegate:self queue:queue];
    [_session addOutput:_videoOutput];

    [_audioOutput setSampleBufferDelegate:self queue:queue];
    [_session addOutput:_audioOutput];

    dispatch_set_context(queue, self);
    dispatch_set_finalizer_f(queue, queue_finalizer);

    dispatch_release(queue);

    [_session startRunning];
}
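
The question doesn't show the capture delegate callback that feeds the recording methods further down; a minimal sketch of what it presumably looks like, assuming the class adopts AVCaptureVideoDataOutputSampleBufferDelegate and AVCaptureAudioDataOutputSampleBufferDelegate:

- (void) captureOutput:(AVCaptureOutput *)captureOutput
 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
        fromConnection:(AVCaptureConnection *)connection
{
    // Route each buffer to the matching recording method.
    if (captureOutput == _videoOutput)
        [self recordingVideoWithBuffer:sampleBuffer];
    else if (captureOutput == _audioOutput)
        [self recordingAudioWithBuffer:sampleBuffer];
}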

- (void) deleteSession
{
    [_session stopRunning];

    [(AVCaptureVideoPreviewLayer *)_cameraLayer setSession:nil];
    [_cameraLayer removeFromSuperlayer];
    [_cameraLayer release];
    _cameraLayer = nil;

    [_audioOutput setSampleBufferDelegate:nil queue:NULL];
    [_videoOutput setSampleBufferDelegate:nil queue:NULL];

    [_audioOutput release];
    _audioOutput = nil;

    [_videoOutput release];
    _videoOutput = nil;

    [_cAudioInput release];
    _cAudioInput = nil;

    [_cVideoInput release];
    _cVideoInput = nil;

    NSArray *inputs = [_session inputs];
    for (AVCaptureInput *input in inputs)
         [_session removeInput:input];

    NSArray *outputs = [_session outputs];
    for (AVCaptureOutput *output in outputs)
        [_session removeOutput:output];

    [_session release];
    _session = nil;
}

The recording:

- (void) createWriter
{
    NSString *file = [self file];

    if ([[NSFileManager defaultManager] fileExistsAtPath:file])
        [[NSFileManager defaultManager] removeItemAtPath:file error:NULL];

    NSError *error = nil;
    _writer = [[AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:file] fileType:AVFileTypeQuickTimeMovie error:&error] retain];

    if (error)
    {
        [_writer release];
        _writer = nil;

        NSLog(@"%@", error);
        return;
    }

    // Mono AAC at 44.1 kHz / 64 kbit/s for the audio track.
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
                              [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                              [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                              [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                              [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                              [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                              nil];

    _wAudioInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:settings] retain];

    [_writer addInput:_wAudioInput];

    // H.264 at 640x480 for the video track, matching the session preset.
    settings = [NSDictionary dictionaryWithObjectsAndKeys:
                AVVideoCodecH264, AVVideoCodecKey,
                [NSNumber numberWithInt:640], AVVideoWidthKey,
                [NSNumber numberWithInt:480], AVVideoHeightKey,
                nil];

    _wVideoInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings] retain];

    [_writer addInput:_wVideoInput];
}
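
One detail not in the question that is worth checking here: neither writer input sets expectsMediaDataInRealTime, which Apple recommends turning on for inputs fed from a live capture source, and which directly affects how often isReadyForMoreMediaData returns YES. A sketch of the two extra lines, which would go right after the inputs are created (an assumption, not the asker's code):

    // Tell the writer these inputs receive buffers in real time from the
    // capture session, so it keeps isReadyForMoreMediaData usable.
    _wAudioInput.expectsMediaDataInRealTime = YES;
    _wVideoInput.expectsMediaDataInRealTime = YES;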

- (void) deleteWriter
{
    [_wVideoInput release];
    _wVideoInput = nil;

    [_wAudioInput release];
    _wAudioInput = nil;

    [_writer release];
    _writer = nil;
}

- (void) recordingAudioWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;

    if ([_wAudioInput isReadyForMoreMediaData])
        [_wAudioInput appendSampleBuffer:sampleBuffer];
}

- (void) recordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;

    if ([_wVideoInput isReadyForMoreMediaData])
        [_wVideoInput appendSampleBuffer:sampleBuffer];
}
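
The question never shows canRecordBuffer: or where writing actually starts, but the writer has to be started before the first append succeeds. A hypothetical sketch of that guard (the _isRecording flag and the method body are assumptions); note that the time passed to startSessionAtSourceTime: should be the timestamp of the first appended buffer, since anchoring the session earlier than the first video sample can leave exactly the kind of blank lead-in described above:

- (BOOL) canRecordBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (!_isRecording || !_writer)
        return NO;

    if (_writer.status == AVAssetWriterStatusUnknown)
    {
        // Start the writer and anchor the session at this buffer's timestamp.
        [_writer startWriting];
        [_writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    }

    return _writer.status == AVAssetWriterStatusWriting;
}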

The playback:

- (void) observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    AVPlayerItem *item = (AVPlayerItem *)object;
    [item removeObserver:self forKeyPath:@"status"];

    switch (item.status)
    {
        case AVPlayerItemStatusReadyToPlay:

            [_player seekToTime:kCMTimeZero];
            [_player play];

            [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(finishPlaying:) name:AVPlayerItemDidPlayToEndTimeNotification object:item];
            break;
        case AVPlayerItemStatusUnknown:
        case AVPlayerItemStatusFailed:
            break;
        default:
            break;
    }
}

- (void) finishPlaying:(NSNotification *)notification
{
    [_player pause];
    [_playerLayer removeFromSuperlayer];

    [_playerLayer release];
    _playerLayer = nil;

    [_player release];
    _player = nil;

    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil];
}

- (void) play:(NSString *)path
{
    _player = [[AVPlayer playerWithURL:[NSURL fileURLWithPath:path]] retain];

    _playerLayer = [[AVPlayerLayer playerLayerWithPlayer:_player] retain];
    // Rotate a quarter-turn around the z axis and mirror vertically.
    _playerLayer.transform = CATransform3DScale(CATransform3DMakeRotation(M_PI_2, 0, 0, 1), 1, -1, 1);
    _playerLayer.frame = self.bounds;
    [self.layer addSublayer:_playerLayer];

    [_player.currentItem addObserver:self forKeyPath:@"status" options:0 context:NULL];
}
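
The glue between recording and playback is also omitted. A hypothetical sketch of how the pieces might be tied together (the method name and _isRecording flag are assumptions; the synchronous finishWriting matches the era of this code, while newer iOS prefers finishWritingWithCompletionHandler:):

- (void) stopRecordingAndReplay
{
    _isRecording = NO;

    // Close out both tracks and finalize the movie file before playing it.
    [_wAudioInput markAsFinished];
    [_wVideoInput markAsFinished];
    [_writer finishWriting];

    [self play:[self file]];
}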

The solution:

I had a similar problem, and while this didn't completely solve it, it helped a lot: when the input isn't ready to be written to again yet, instead of just dropping the sample buffer, try waiting a little and then re-evaluating whether the data can be written.

- (void) recordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;

    if (!_wVideoInput.readyForMoreMediaData && _isRecording)
    {
        // Retry the same buffer in 50 ms instead of discarding it.
        [self performSelector:@selector(recordingVideoWithBuffer:) withObject:(__bridge id)(sampleBuffer) afterDelay:0.05];
        return;
    }

    [_wVideoInput appendSampleBuffer:sampleBuffer];
}

If you're not using ARC, plain sampleBuffer is all you need; with ARC the __bridge cast has to be added.

Edit: I used performSelector and an early return rather than a while loop that sleeps with NSThread, because it's non-blocking. Blocking is bad.
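
One caveat with this approach: performSelector:withObject:afterDelay: schedules the call on the current thread's run loop, and the GCD thread that capture callbacks arrive on doesn't normally run one, so the delayed retry may never fire there. A sketch of the same retry built on dispatch_after, which has no run-loop dependency; _outputQueue is a hypothetical ivar holding the queue the sample-buffer delegates were set up with:

if (!_wVideoInput.readyForMoreMediaData && _isRecording)
{
    // Keep the buffer alive until the retry fires, then balance the retain.
    CFRetain(sampleBuffer);
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.05 * NSEC_PER_SEC)),
                   _outputQueue,
                   ^{
                       [self recordingVideoWithBuffer:sampleBuffer];
                       CFRelease(sampleBuffer);
                   });
    return;
}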
