HTML5   发布时间:2022-04-27  发布网站:大佬教程  code.js-code.com
大佬教程收集整理的这篇文章主要介绍了ios – 我想用GPUImage制作一个以上的输入纹理滤镜.但我得到了黑色输出大佬教程大佬觉得挺不错的,现在分享给大家,也给大家做个参考。
我想创建一个像GPU Image的GPU ImageTwoInputFilter这样的新过滤器.

这是我的代码。一个名为IFFourInputFilter的基类，它与GPUImage的GPUImageTwoInputFilter非常相似。

#import "IFFourInputFilter.h"

// Vertex shader for a four-input filter. Passes the vertex position through
// unchanged and forwards one texture coordinate per input to the fragment
// stage (mirrors GPUImageTwoInputFilter's shader, extended to four inputs).
// FIX: the scraped article had the GPUImage macro mangled as "SHADER_StriNG";
// the correct macro is SHADER_STRING.
NSString *const kIFFourInputTextureVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;
 attribute vec4 inputTextureCoordinate2;
 attribute vec4 inputTextureCoordinate3;
 attribute vec4 inputTextureCoordinate4;

 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;
 varying vec2 textureCoordinate3;
 varying vec2 textureCoordinate4;

 void main()
 {
     gl_Position = position;
     textureCoordinate = inputTextureCoordinate.xy;
     textureCoordinate2 = inputTextureCoordinate2.xy;
     textureCoordinate3 = inputTextureCoordinate3.xy;
     textureCoordinate4 = inputTextureCoordinate4.xy;
 }
);


@implementation IFFourInputFilter

// NOTE(review): this class's own ivars (inputRotation2..4, hasSetTexture1..3,
// hasReceivedFrame1..4, frameWasVideo1..4, framecheckDisabled1..4,
// frameTime1..4, filterTextureCoordinateAttribute2..4,
// filterInputTextureUniform2..4, filtersourceTexture2..4, textureDelegate2..4)
// are declared in IFFourInputFilter.h, which is not shown; their spellings
// here must match that header. Framework-owned identifiers (GPUImageFilter
// ivars, GPUImageInput protocol selectors, OpenGL ES and CoreMedia names)
// were corrupted by the article scraper and have been restored to their
// canonical spellings — with the mangled selectors the GPUImageInput protocol
// was never actually implemented, so the filter received no frames.

#pragma mark -
#pragma mark Initialization and teardown

/// Convenience initializer: pairs the stock four-input vertex shader with the
/// caller-supplied fragment shader.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
    if (!(self = [self initWithVertexShaderFromString:kIFFourInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString]))
    {
        return nil;
    }

    return self;
}

/// Designated initializer. Resets per-input rotation and frame-sync state,
/// then looks up the attribute/uniform locations for inputs 2-4 (input 1 is
/// handled by the GPUImageFilter superclass).
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
{
    if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))
    {
        return nil;
    }

    inputRotation2 = kGPUImageNoRotation;
    inputRotation3 = kGPUImageNoRotation;
    inputRotation4 = kGPUImageNoRotation;

    hasSetTexture1 = NO;
    hasSetTexture2 = NO;
    hasSetTexture3 = NO;

    hasReceivedFrame1 = NO;
    hasReceivedFrame2 = NO;
    hasReceivedFrame3 = NO;
    hasReceivedFrame4 = NO;
    frameWasVideo1 = NO;
    frameWasVideo2 = NO;
    frameWasVideo3 = NO;
    frameWasVideo4 = NO;
    framecheckDisabled1 = NO;
    framecheckDisabled2 = NO;
    framecheckDisabled3 = NO;
    framecheckDisabled4 = NO;

    frameTime1 = kCMTimeInvalid;
    frameTime2 = kCMTimeInvalid;
    frameTime3 = kCMTimeInvalid;
    frameTime4 = kCMTimeInvalid;

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageOpenGLESContext useImageProcessingContext];

        filterTextureCoordinateAttribute2 = [filterProgram attributeIndex:@"inputTextureCoordinate2"];
        // Assumes the fragment shader names its second sampler "inputImageTexture2".
        filterInputTextureUniform2 = [filterProgram uniformIndex:@"inputImageTexture2"];
        glEnableVertexAttribArray(filterTextureCoordinateAttribute2);

        filterTextureCoordinateAttribute3 = [filterProgram attributeIndex:@"inputTextureCoordinate3"];
        // Assumes the fragment shader names its third sampler "inputImageTexture3".
        filterInputTextureUniform3 = [filterProgram uniformIndex:@"inputImageTexture3"];
        glEnableVertexAttribArray(filterTextureCoordinateAttribute3);

        filterTextureCoordinateAttribute4 = [filterProgram attributeIndex:@"inputTextureCoordinate4"];
        // Assumes the fragment shader names its fourth sampler "inputImageTexture4".
        filterInputTextureUniform4 = [filterProgram uniformIndex:@"inputImageTexture4"];
        glEnableVertexAttribArray(filterTextureCoordinateAttribute4);
    });

    return self;
}

/// Registers the extra texture-coordinate attributes before the program is
/// linked (the superclass registers position and inputTextureCoordinate).
- (void)initializeAttributes;
{
    [super initializeAttributes];
    [filterProgram addAttribute:@"inputTextureCoordinate2"];
    [filterProgram addAttribute:@"inputTextureCoordinate3"];
    [filterProgram addAttribute:@"inputTextureCoordinate4"];
}

/// Stops waiting on input 1 before rendering (use when input 1 is a still image).
- (void)disableFramecheck1;
{
    framecheckDisabled1 = YES;
}

/// Stops waiting on input 2 before rendering.
- (void)disableFramecheck2;
{
    framecheckDisabled2 = YES;
}

/// Stops waiting on input 3 before rendering.
- (void)disableFramecheck3;
{
    framecheckDisabled3 = YES;
}

/// Stops waiting on input 4 before rendering.
- (void)disableFramecheck4;
{
    framecheckDisabled4 = YES;
}

#pragma mark -
#pragma mark Rendering

/// Binds all four source textures to texture units 2-5 and draws the filter
/// quad into the filter's FBO.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture;
{
    if (self.preventRendering)
    {
        return;
    }

    [GPUImageOpenGLESContext setActiveShaderProgram:filterProgram];
    [self setUniformsForProgramAtIndex:0];

    [self setFilterFBO];

    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, sourceTexture);
    glUniform1i(filterInputTextureUniform, 2);

    glActiveTexture(GL_TEXTURE3);
    glBindTexture(GL_TEXTURE_2D, filtersourceTexture2);
    glUniform1i(filterInputTextureUniform2, 3);

    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, filtersourceTexture3);
    glUniform1i(filterInputTextureUniform3, 4);

    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, filtersourceTexture4);
    glUniform1i(filterInputTextureUniform4, 5);

    // FIX: the scraped code dropped arguments from these GL calls;
    // glVertexAttribPointer takes (index, size, type, normalized, stride, ptr)
    // and glDrawArrays takes (mode, first, count).
    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
    glVertexAttribPointer(filterTextureCoordinateAttribute2, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);
    glVertexAttribPointer(filterTextureCoordinateAttribute3, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation3]);
    glVertexAttribPointer(filterTextureCoordinateAttribute4, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation4]);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

/// Releases all four upstream textures when memory conservation is requested.
- (void)releaseInputTexturesIfNeeded;
{
    if (shouldConserveMemoryForNextFrame)
    {
        [firstTextureDelegate textureNoLongerNeededForTarget:self];
        [textureDelegate2 textureNoLongerNeededForTarget:self];
        [textureDelegate3 textureNoLongerNeededForTarget:self];
        [textureDelegate4 textureNoLongerNeededForTarget:self];
        shouldConserveMemoryForNextFrame = NO;
    }
}

#pragma mark -
#pragma mark GPUImageInput

/// First unclaimed input slot (0-3). There is no hasSetTexture4 flag, so once
/// the first three slots are taken every further source lands on index 3.
- (NSInteger)nextAvailableTextureIndex;
{
    if (!hasSetTexture1){
        return 0;
    }else if (!hasSetTexture2) {
        return 1;
    }else if (!hasSetTexture3) {
        return 2;
    }else{
        return 3;
    }
}

/// GPUImageInput: stores the incoming texture name for the given slot.
- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex;
{
    switch (textureIndex) {
        case 0:
            filtersourceTexture = newInputTexture;
            hasSetTexture1 = YES;
            break;
        case 1:
            filtersourceTexture2 = newInputTexture;
            hasSetTexture2 = YES;
            break;
        case 2:
            filtersourceTexture3 = newInputTexture;
            hasSetTexture3 = YES;
            break;
        case 3:
            filtersourceTexture4 = newInputTexture;
            // NOTE(review): no hasSetTexture4 flag exists in the header, so
            // slot 3 is intentionally re-assignable.
            break;
        default:
            break;
    }
}

/// GPUImageInput: only input 0 drives the output size; a zero size frees slot 0.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    if (textureIndex == 0)
    {
        [super setInputSize:newSize atIndex:textureIndex];

        if (CGSizeEqualToSize(newSize, CGSizeZero))
        {
            hasSetTexture1 = NO;
        }
    }
}

/// GPUImageInput: records the rotation mode for the given input slot.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    switch (textureIndex) {
        case 0:
            inputRotation = newInputRotation;
            break;
        case 1:
            inputRotation2 = newInputRotation;
            break;
        case 2:
            inputRotation3 = newInputRotation;
            break;
        case 3:
            inputRotation4 = newInputRotation;
            break;
        default:
            break;
    }
}

/// Returns sizeToRotate with width/height swapped when the given input's
/// rotation mode requires it.
- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
{
    CGSize rotatedSize = sizeToRotate;

    // FIX: initialize the local so an out-of-range index cannot leave it
    // holding garbage (the original read it uninitialized in that case).
    GPUImageRotationMode rotationToCheck = kGPUImageNoRotation;
    switch (textureIndex) {
        case 0:
            rotationToCheck = inputRotation;
            break;
        case 1:
            rotationToCheck = inputRotation2;
            break;
        case 2:
            rotationToCheck = inputRotation3;
            break;
        case 3:
            rotationToCheck = inputRotation4;
            break;
        default:
            break;
    }

    if (GPUImageRotationSwapsWidthAndHeight(rotationToCheck))
    {
        rotatedSize.width = sizeToRotate.height;
        rotatedSize.height = sizeToRotate.width;
    }

    return rotatedSize;
}

/// GPUImageInput: marks the given input as updated; when all four inputs have
/// reported (or are exempted via disableFramecheckN, or a movie frame arrives
/// opposite still images), forwards one combined frame downstream and resets
/// the per-input flags.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    outputTextureRetainCount = [targets count];

    // You can set up infinite update loops, so this helps to short-circuit them.
    if (hasReceivedFrame1 && hasReceivedFrame2 && hasReceivedFrame3 && hasReceivedFrame4)
    {
        return;
    }

    BOOL updatedMovieFrameOppositeStillImage = NO;

    switch (textureIndex) {
        case 0:
            hasReceivedFrame1 = YES;
            frameTime1 = frameTime;
            if (framecheckDisabled2)
            {
                hasReceivedFrame2 = YES;
            }
            if (framecheckDisabled3)
            {
                hasReceivedFrame3 = YES;
            }
            if (framecheckDisabled4)
            {
                hasReceivedFrame4 = YES;
            }

            // A timed (video) frame against three untimed (still) inputs may
            // render immediately without waiting for the other slots.
            if (!CMTIME_IS_INDEFINITE(frameTime))
            {
                if (CMTIME_IS_INDEFINITE(frameTime2) && CMTIME_IS_INDEFINITE(frameTime3) && CMTIME_IS_INDEFINITE(frameTime4))
                {
                    updatedMovieFrameOppositeStillImage = YES;
                }
            }
            break;
        case 1:
            hasReceivedFrame2 = YES;
            frameTime2 = frameTime;
            if (framecheckDisabled1)
            {
                hasReceivedFrame1 = YES;
            }
            if (framecheckDisabled3)
            {
                hasReceivedFrame3 = YES;
            }
            if (framecheckDisabled4)
            {
                hasReceivedFrame4 = YES;
            }

            if (!CMTIME_IS_INDEFINITE(frameTime))
            {
                if (CMTIME_IS_INDEFINITE(frameTime1) && CMTIME_IS_INDEFINITE(frameTime3) && CMTIME_IS_INDEFINITE(frameTime4))
                {
                    updatedMovieFrameOppositeStillImage = YES;
                }
            }
            break;
        case 2:
            hasReceivedFrame3 = YES;
            frameTime3 = frameTime;
            if (framecheckDisabled1)
            {
                hasReceivedFrame1 = YES;
            }
            if (framecheckDisabled2)
            {
                hasReceivedFrame2 = YES;
            }
            if (framecheckDisabled4)
            {
                hasReceivedFrame4 = YES;
            }

            if (!CMTIME_IS_INDEFINITE(frameTime))
            {
                if (CMTIME_IS_INDEFINITE(frameTime1) && CMTIME_IS_INDEFINITE(frameTime2) && CMTIME_IS_INDEFINITE(frameTime4))
                {
                    updatedMovieFrameOppositeStillImage = YES;
                }
            }
            break;
        case 3:
            hasReceivedFrame4 = YES;
            frameTime4 = frameTime;
            if (framecheckDisabled1)
            {
                hasReceivedFrame1 = YES;
            }
            if (framecheckDisabled2)
            {
                hasReceivedFrame2 = YES;
            }
            if (framecheckDisabled3)
            {
                hasReceivedFrame3 = YES;
            }

            if (!CMTIME_IS_INDEFINITE(frameTime))
            {
                if (CMTIME_IS_INDEFINITE(frameTime1) && CMTIME_IS_INDEFINITE(frameTime2) && CMTIME_IS_INDEFINITE(frameTime3))
                {
                    updatedMovieFrameOppositeStillImage = YES;
                }
            }
            break;
        default:
            break;
    }

    if ((hasReceivedFrame1 && hasReceivedFrame2 && hasReceivedFrame3 && hasReceivedFrame4) || updatedMovieFrameOppositeStillImage)
    {
        [super newFrameReadyAtTime:frameTime atIndex:0];
        hasReceivedFrame1 = NO;
        hasReceivedFrame2 = NO;
        hasReceivedFrame3 = NO;
        hasReceivedFrame4 = NO;
    }
}

/// GPUImageInput: remembers the upstream texture delegate for each slot so
/// releaseInputTexturesIfNeeded can notify it.
- (void)setTextureDelegate:(id<GPUImageTextureDelegate>)newTextureDelegate atIndex:(NSInteger)textureIndex;
{
    switch (textureIndex) {
        case 0:
            firstTextureDelegate = newTextureDelegate;
            break;
        case 1:
            textureDelegate2 = newTextureDelegate;
            break;
        case 2:
            textureDelegate3 = newTextureDelegate;
            break;
        case 3:
            textureDelegate4 = newTextureDelegate;
            break;
        default:
            break;
    }
}

@end

名为IFAmaroFilter的类扩展了IFFourInputFilter.

#import "IFAmaroFilter.h"

// Amaro-style fragment shader over four inputs:
//   inputImageTexture  - source image
//   inputImageTexture2 - blowout lookup
//   inputImageTexture3 - overlay lookup
//   inputImageTexture4 - tone-curve map
// FIX: macro restored to SHADER_STRING and varyings to "textureCoordinate"
// (scraper case-mangling), and main() now outputs `mapped` — the original
// computed the tone-mapped color and then discarded it by writing `texel`.
NSString *const kIFAmaroFilterFragmentShaderString = SHADER_STRING
(
 precision lowp float;

 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2; // blowout
 uniform sampler2D inputImageTexture3; // overlay
 uniform sampler2D inputImageTexture4; // map

 void main()
 {
     vec4 texel = texture2D(inputImageTexture, textureCoordinate);
     vec3 bbTexel = texture2D(inputImageTexture2, textureCoordinate).rgb;

     texel.r = texture2D(inputImageTexture3, vec2(bbTexel.r, texel.r)).r;
     texel.g = texture2D(inputImageTexture3, vec2(bbTexel.g, texel.g)).g;
     texel.b = texture2D(inputImageTexture3, vec2(bbTexel.b, texel.b)).b;

     vec4 mapped;
     mapped.r = texture2D(inputImageTexture4, vec2(texel.r, 0.16666)).r;
     mapped.g = texture2D(inputImageTexture4, vec2(texel.g, .5)).g;
     mapped.b = texture2D(inputImageTexture4, vec2(texel.b, .83333)).b;
     mapped.a = 1.0;

     gl_FragColor = mapped;
 }
 );

@implementation IFAmaroFilter

/// Creates the filter with the Amaro fragment shader; the four-input vertex
/// shader is supplied by the IFFourInputFilter superclass.
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kIFAmaroFilterFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end

当我使用滤镜时,我得到了黑色输出.代码如下:

filter = [[IFAmaroFilter alloc] init];
    // FIX: selectors restored to pathForResource:ofType: and
    // addTarget:atTextureLocation: (scraper case-mangling).
    // NOTE(review): under ARC these GPUImagePicture locals are released as
    // soon as this scope ends, so inputs 1-3 vanish and the filter renders
    // black — keep strong references (ivars/properties) to gp1/gp2/gp3 for
    // the lifetime of the filter chain. This is the article's own conclusion.
    GPUImagePicture *gp1 = [[GPUImagePicture alloc] initWithImage:[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"blackboard1024" ofType:@"png"]]];
    GPUImagePicture *gp2 = [[GPUImagePicture alloc] initWithImage:[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"overlaymap" ofType:@"png"]]];
    GPUImagePicture *gp3 = [[GPUImagePicture alloc] initWithImage:[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"amaroMap" ofType:@"png"]]];

    [stillCamera addTarget:filter atTextureLocation:0];
    [gp1 addTarget:filter atTextureLocation:1];
    [gp1 processImage];
    [gp2 addTarget:filter atTextureLocation:2];
    [gp2 processImage];
    [gp3 addTarget:filter atTextureLocation:3];
    [gp3 processImage];
    [filter addTarget:(GPUImageView *)self.view];

解决方法

我发现GPUImagePicture会被自动释放，因此过滤器不会接收到纹理。如果您遇到同样的问题，请仔细检查纹理的生命周期管理，观察它们何时被释放。

大佬总结

以上是大佬教程为你收集整理的ios – 我想用GPUImage制作一个以上的输入纹理滤镜.但我得到了黑色输出全部内容,希望文章能够帮你解决ios – 我想用GPUImage制作一个以上的输入纹理滤镜.但我得到了黑色输出所遇到的程序开发问题。

如果觉得大佬教程网站内容还不错,欢迎将大佬教程推荐给程序员好友。

本图文内容来源于网友网络收集整理提供,作为学习参考使用,版权属于原作者。
如您有任何意见或建议可联系处理。小编QQ:384754419,请注明来意。