How to use AVSampleBufferDisplayLayer in iOS 8 for RTP H264 Streams with GStreamer?
After developers got access to the hardware H.264 decoder in iOS 8, I would like to use it now. There is a nice introduction to direct access to video encoding and decoding from WWDC 2014; you can have a look at it here.

Based on example 1, I started developing an application that should be able to take an H264-RTP-UDP stream from GStreamer, sink it into an "appsink" element to get direct access to the NAL units, and convert them into CMSampleBuffers that my AVSampleBufferDisplayLayer can display.
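As a quick orientation (my own addition, not from the original post), the receiving pipeline can also be written as a single gst_parse_launch() string; the caps and element names mirror the app_function code further down, and the gst-launch-1.0 sender command in the comment is only an assumed example of how such a stream might be produced:

// Sketch of the equivalent receiving pipeline, built with gst_parse_launch().
// Assumed sender example (e.g. on a desktop machine):
//   gst-launch-1.0 videotestsrc ! x264enc tune=zerolatency ! rtph264pay config-interval=1 pt=96 ! udpsink host=<device-ip> port=5000
GError *error = NULL;
GstElement *pipeline = gst_parse_launch(
    "udpsrc port=5000 caps=\"application/x-rtp,media=video,clock-rate=90000,encoding-name=H264\" "
    "! rtph264depay "
    "! video/x-h264,stream-format=byte-stream,alignment=nal "
    "! appsink name=appsink",
    &error);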

The interesting code is the following:

//
//  GStreamerBackend.m
// 

#import "GStreamerBackend.h"

NSString * const naluTypesStrings[] = {
    @"Unspecified (non-VCL)",
    @"Coded slice of a non-IDR picture (VCL)",
    @"Coded slice data partition A (VCL)",
    @"Coded slice data partition B (VCL)",
    @"Coded slice data partition C (VCL)",
    @"Coded slice of an IDR picture (VCL)",
    @"Supplemental enhancement information (SEI) (non-VCL)",
    @"Sequence parameter set (non-VCL)",
    @"Picture parameter set (non-VCL)",
    @"Access unit delimiter (non-VCL)",
    @"End of sequence (non-VCL)",
    @"End of stream (non-VCL)",
    @"Filler data (non-VCL)",
    @"Sequence parameter set extension (non-VCL)",
    @"Prefix NAL unit (non-VCL)",
    @"Subset sequence parameter set (non-VCL)",
    @"Reserved (non-VCL)",
    @"Coded slice of an auxiliary coded picture without partitioning (non-VCL)",
    @"Coded slice extension (non-VCL)",
    @"Coded slice extension for depth view components (non-VCL)",
    @"Unspecified (non-VCL)",
};


static GstFlowReturn new_sample(GstAppSink *sink,gpointer user_data)
{
    GStreamerBackend *backend = (__bridge GStreamerBackend *)(user_data);
    GstSample *sample = gst_app_sink_pull_sample(sink);
    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstMemory *memory = gst_buffer_get_all_memory(buffer);

    GstMapInfo info;
    gst_memory_map(memory, &info, GST_MAP_READ);

    int startCodeIndex = 0;
    for (int i = 0; i < 5; i++) {
        if (info.data[i] == 0x01) {
            startCodeIndex = i;
            break;
        }
    }
    int nalu_type = ((uint8_t)info.data[startCodeIndex + 1] & 0x1F);
    NSLog(@"NALU with Type \"%@\" received.",naluTypesStrings[nalu_type]);
    if (backend.searchForSPSAndPPS) {
        if (nalu_type == 7)
            backend.spsData = [NSData dataWithBytes:&(info.data[startCodeIndex + 1]) length:info.size - 4];

        if (nalu_type == 8)
            backend.ppsData = [NSData dataWithBytes:&(info.data[startCodeIndex + 1]) length:info.size - 4];

        if (backend.spsData != nil && backend.ppsData != nil) {
            const uint8_t* const parameterSetPointers[2] = { (const uint8_t*)[backend.spsData bytes], (const uint8_t*)[backend.ppsData bytes] };
            const size_t parameterSetSizes[2] = { [backend.spsData length], [backend.ppsData length] };

            CMVideoFormatDescriptionRef videoFormatDescr;
            OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault, 2, parameterSetPointers, parameterSetSizes, 4, &videoFormatDescr);
            [backend setVideoFormatDescr:videoFormatDescr];
            [backend setSearchForSPSAndPPS:false];
            NSLog(@"Found all data for CMVideoFormatDescription. Creation: %@.", (status == noErr) ? @"successfully." : @"Failed.");
        }
    }
    if (nalu_type == 1 || nalu_type == 5) {
        CMBlockBufferRef videoBlock = NULL;
        OSStatus status = CMBlockBufferCreateWithMemoryBlock(NULL, info.data, info.size, kCFAllocatorNull, NULL, 0, info.size, 0, &videoBlock);
        NSLog(@"BlockBufferCreation: %@", (status == kCMBlockBufferNoErr) ? @"successfully." : @"Failed.");
        // Overwrite the 4-byte Annex B start code with a big-endian length prefix (AVCC format).
        const uint8_t sourceBytes[] = {(uint8_t)(info.size >> 24), (uint8_t)(info.size >> 16), (uint8_t)(info.size >> 8), (uint8_t)info.size};
        status = CMBlockBufferReplaceDataBytes(sourceBytes, videoBlock, 0, 4);
        NSLog(@"BlockBufferReplace: %@", (status == kCMBlockBufferNoErr) ? @"successfully." : @"Failed.");

        CMSampleBufferRef sbRef = NULL;
        const size_t sampleSizeArray[] = {info.size};

        status = CMSampleBufferCreate(kCFAllocatorDefault, videoBlock, true, NULL, NULL, backend.videoFormatDescr, 1, 0, NULL, 1, sampleSizeArray, &sbRef);
        NSLog(@"SampleBufferCreate: %@", (status == noErr) ? @"successfully." : @"Failed.");

        CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sbRef, YES);
        CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
        CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);

        NSLog(@"Error: %@, Status: %@", backend.displayLayer.error, (backend.displayLayer.status == AVQueuedSampleBufferRenderingStatusUnknown) ? @"unknown" : ((backend.displayLayer.status == AVQueuedSampleBufferRenderingStatusRendering) ? @"rendering" : @"Failed"));
        dispatch_async(dispatch_get_main_queue(), ^{
            [backend.displayLayer enqueueSampleBuffer:sbRef];
            [backend.displayLayer setNeedsDisplay];
        });
        });

    }

    gst_memory_unmap(memory,&info);
    gst_memory_unref(memory);
    gst_buffer_unref(buffer);

    return GST_FLOW_OK;
}

@implementation GStreamerBackend

- (instancetype)init
{
    if (self = [super init]) {
        self.searchForSPSAndPPS = true;
        self.ppsData = nil;
        self.spsData = nil;
        self.displayLayer = [[AVSampleBufferDisplayLayer alloc] init];
        self.displayLayer.bounds = CGRectMake(0, 0, 300, 300);
        self.displayLayer.backgroundColor = [UIColor blackColor].CGColor;
        self.displayLayer.position = CGPointMake(500,500);
        self.queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT,0);
        dispatch_async(self.queue,^{
            [self app_function];
        });
    }
    return self;
}

- (void)start
{
    if(gst_element_set_state(self.pipeline,GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        NSLog(@"Failed to set pipeline to playing");
    }
}

- (void)app_function
{
    GstElement *udpsrc,*rtphdepay,*capsfilter;
    GMainContext *context; /* GLib context used to run the main loop */
    GMainLoop *main_loop;  /* GLib main loop */


    context = g_main_context_new ();
    g_main_context_push_thread_default(context);

    g_set_application_name ("appsink");

    self.pipeline = gst_pipeline_new ("testpipe");

    udpsrc = gst_element_factory_make ("udpsrc","udpsrc");
    GstCaps *caps = gst_caps_new_simple("application/x-rtp", "media", G_TYPE_STRING, "video", "clock-rate", G_TYPE_INT, 90000, "encoding-name", G_TYPE_STRING, "H264", NULL);
    g_object_set(udpsrc, "caps", caps, "port", 5000, NULL);
    gst_caps_unref(caps);
    rtphdepay = gst_element_factory_make("rtph264depay","rtph264depay");
    capsfilter = gst_element_factory_make("capsfilter","capsfilter");
    caps = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "byte-stream", "alignment", G_TYPE_STRING, "nal", NULL);
    g_object_set(capsfilter, "caps", caps, NULL);
    self.appsink = gst_element_factory_make ("appsink","appsink");

    gst_bin_add_many(GST_BIN(self.pipeline), udpsrc, rtphdepay, capsfilter, self.appsink, NULL);

    if (!gst_element_link_many(udpsrc, rtphdepay, capsfilter, self.appsink, NULL)) {
        NSLog(@"Cannot link GStreamer elements");
        exit(1);
    }

    if (gst_element_set_state(self.pipeline, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS)
        NSLog(@"Could not change to ready");

    GstAppSinkCallbacks callbacks = { NULL, NULL, new_sample };
    gst_app_sink_set_callbacks(GST_APP_SINK(self.appsink), &callbacks, (__bridge gpointer)(self), NULL);

    main_loop = g_main_loop_new(context, FALSE);
    g_main_loop_run (main_loop);


    /* Free resources */
    g_main_loop_unref (main_loop);
    main_loop = NULL;
    g_main_context_pop_thread_default(context);
    g_main_context_unref (context);
    gst_element_set_state(GST_ELEMENT(self.pipeline), GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(self.pipeline));
}

@end
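
For completeness, here is a minimal usage sketch (my own assumption, not part of the original post) of how the backend could be hooked into a view controller; ViewController and viewDidLoad are just the usual Xcode template names, and the point is that the AVSampleBufferDisplayLayer has to be added to a view's layer hierarchy before anything can appear on screen:

#import "GStreamerBackend.h"

@implementation ViewController {
    GStreamerBackend *_backend;
}

- (void)viewDidLoad
{
    [super viewDidLoad];
    _backend = [[GStreamerBackend alloc] init];
    // Attach the display layer to the view's layer tree so enqueued
    // sample buffers can actually be rendered.
    [self.view.layer addSublayer:_backend.displayLayer];
    // For brevity start is called right away; in practice one would wait
    // until app_function has created the pipeline on its background queue.
    [_backend start];
}

@end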

What I get when running the app and starting to stream to the iOS device:

NALU with Type "Sequence parameter set (non-VCL)" received.
NALU with Type "Picture parameter set   (non-VCL)" received.

Found all data for CMVideoFormatDescription. Creation: successfully..

NALU with Type "Coded slice of an IDR picture (VCL)" received.
BlockBufferCreation: successfully.
BlockBufferReplace: successfully.
SampleBufferCreate: successfully.
Error: (null), Status: unknown

NALU with Type "Coded slice of a non-IDR picture (VCL)" received.
BlockBufferCreation: successfully.
BlockBufferReplace: successfully.
SampleBufferCreate: successfully.
Error: (null), Status: rendering
[...] (repetition of the last 5 lines)

So it seems the decoder works as it should, but my problem is that I cannot see anything in my AVSampleBufferDisplayLayer.
It might be a problem with kCMSampleAttachmentKey_DisplayImmediately, but I have set it up as I was told to here (see the 'important' note).

Every idea is welcome ;)

Solution

Got it working now. The length of each NALU must not include the length header itself, so I subtracted 4 from info.size before using it for my sourceBytes.
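
In code, the fix boils down to something like the following sketch (based on the description above; the variable names match the new_sample callback shown earlier):

// The 4-byte big-endian length prefix written over the Annex B start code
// must describe only the NAL payload, i.e. the mapped size minus the
// 4-byte start code itself.
size_t dataLength = info.size - 4;
const uint8_t sourceBytes[] = {(uint8_t)(dataLength >> 24), (uint8_t)(dataLength >> 16),
                               (uint8_t)(dataLength >> 8), (uint8_t)dataLength};
status = CMBlockBufferReplaceDataBytes(sourceBytes, videoBlock, 0, 4);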
