How to create a simple custom filter for iOS using the Core Image Framework?

I want to use a custom filter in my application. I know that I need to use Core Image, but I'm not sure this is the right approach. The Core Image framework is available on Mac OS and in iOS 5.0, but I'm not sure whether it can be used for custom CIFilter effects. Can you help me with these questions? Thanks, everyone!

+5
4 answers

You cannot create your own kernels/filters in iOS. See http://developer.apple.com/library/mac/#documentation/graphicsimaging/Conceptual/CoreImaging/ci_intro/ci_intro.html, in particular:

Although this document is included in the reference library, it has not been updated for iOS 5.0. The upcoming revision will detail the differences in Core Image on iOS. In particular, the key difference is that Core Image on iOS does not include the ability to create custom image filters.

(Bolding mine)

+5

As Adam states, Core Image on iOS does not currently support custom kernels the way the older Mac implementation does. This limits what you can do with the framework to some combination of the existing filters.
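For what it's worth, "some combination of existing filters" still covers a lot of ground. A minimal sketch of chaining two stock filters (this snippet is illustrative, not from the answer; it assumes CISepiaTone and CIVignette, which were part of the initial iOS 5 filter set, and a valid CIImage named sourceImage):

#import <CoreImage/CoreImage.h>

// Sketch: chain two built-in filters, which is the kind of combination that
// was possible on iOS 5 without custom kernels.
CIImage *sepiaAndVignette(CIImage *sourceImage)
{
    CIFilter *sepia = [CIFilter filterWithName:@"CISepiaTone"];
    [sepia setValue:sourceImage forKey:kCIInputImageKey];
    [sepia setValue:@0.8 forKey:@"inputIntensity"];

    CIFilter *vignette = [CIFilter filterWithName:@"CIVignette"];
    [vignette setValue:sepia.outputImage forKey:kCIInputImageKey];
    [vignette setValue:@1.0 forKey:@"inputIntensity"];
    [vignette setValue:@2.0 forKey:@"inputRadius"];

    return vignette.outputImage;
}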

(Update: 2/13/2012)

As of today, I've released an open source framework for iOS called GPUImage, which lets you create custom filters for images and video using OpenGL ES 2.0 fragment shaders. These custom filters can run far faster on the GPU than equivalent CPU-bound processing, and they are written in the OpenGL Shading Language (GLSL), a C-like language. The framework runs on any iOS device that supports OpenGL ES 2.0, going back to iOS 4.0.

For example, you can set up filtering of live camera video with code like the following:

GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
GPUImageFilter *customFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromFile:@"CustomShader"];
GPUImageView *filteredVideoView = [[GPUImageView alloc] initWithFrame:CGRectMake(0.0, 0.0, viewWidth, viewHeight)];

// Add the view somewhere so it's visible

[videoCamera addTarget:customFilter];
[customFilter addTarget:filteredVideoView];

[videoCamera startCameraCapture];
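The same kind of custom shader can also be applied to a still image rather than live video. A rough sketch of that alternative setup, assuming GPUImage's GPUImagePicture source and a hypothetical bundled image named "sample.jpg" (the exact call for reading the filtered result back into a UIImage has changed across GPUImage versions, so this sketch simply displays the output in a GPUImageView):

// Alternative setup (sketch): filter a bundled still image instead of the camera.
UIImage *inputImage = [UIImage imageNamed:@"sample.jpg"]; // hypothetical bundled image

GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithImage:inputImage];
GPUImageFilter *stillFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromFile:@"CustomShader"];
GPUImageView *filteredStillView = [[GPUImageView alloc] initWithFrame:CGRectMake(0.0, 0.0, viewWidth, viewHeight)];

[stillImageSource addTarget:stillFilter];
[stillFilter addTarget:filteredStillView];

[stillImageSource processImage]; // renders the filtered image into the view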

As an example of what a custom fragment shader looks like, the following applies a sepia tone effect by running each pixel's color through a color matrix:

varying highp vec2 textureCoordinate;

uniform sampler2D inputImageTexture;

void main()
{
    lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
    lowp vec4 outputColor;
    outputColor.r = (textureColor.r * 0.393) + (textureColor.g * 0.769) + (textureColor.b * 0.189);
    outputColor.g = (textureColor.r * 0.349) + (textureColor.g * 0.686) + (textureColor.b * 0.168);
    outputColor.b = (textureColor.r * 0.272) + (textureColor.g * 0.534) + (textureColor.b * 0.131);
    outputColor.a = textureColor.a; // carry alpha through; otherwise it is left undefined

    gl_FragColor = outputColor;
}
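For comparison, the same sepia matrix can be expressed without any custom shader at all, using the built-in CIColorMatrix filter that iOS 5 does ship with. A sketch (illustrative only; the vectors simply restate the coefficients from the shader above):

#import <CoreImage/CoreImage.h>

// Sketch: the sepia matrix above expressed with the stock CIColorMatrix filter.
// Each input vector holds the weights applied to (R, G, B, A) to produce the
// corresponding output channel.
CIImage *sepiaWithColorMatrix(CIImage *sourceImage)
{
    CIFilter *matrix = [CIFilter filterWithName:@"CIColorMatrix"];
    [matrix setValue:sourceImage forKey:kCIInputImageKey];
    [matrix setValue:[CIVector vectorWithX:0.393 Y:0.769 Z:0.189 W:0.0] forKey:@"inputRVector"];
    [matrix setValue:[CIVector vectorWithX:0.349 Y:0.686 Z:0.168 W:0.0] forKey:@"inputGVector"];
    [matrix setValue:[CIVector vectorWithX:0.272 Y:0.534 Z:0.131 W:0.0] forKey:@"inputBVector"];
    [matrix setValue:[CIVector vectorWithX:0.0   Y:0.0   Z:0.0   W:1.0] forKey:@"inputAVector"];
    return matrix.outputImage;
}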

For anyone who has written custom Core Image kernels on the Mac, the shading language used here is very similar to GLSL. In fact, you can do a few things in GLSL that you can't in desktop Core Image, because Core Image's kernel language lacks some features that GLSL has (such as branching).

+20

Creating a custom filter for iOS is easier than building an Image Unit plug-in for MacOS X, and most of what you read about Image Units carries over to custom filters on iOS. The main difference is that an iOS filter cannot be "packaged" and distributed as a standalone resource the way an Image Unit can; it has to be compiled into the app that uses it. It is also only available inside that app; unlike on MacOS X, you cannot install it system-wide for every Core Image client to pick up. Still, it is straightforward to do.

Beyond that, a custom Core Image filter on iOS behaves much like an Image Unit, minus the packaging: there is no .plist or other bundle metadata to write.

On iOS you add a Cocoa Touch class that subclasses CIFilter, declare the input parameters as properties (at a minimum, an input image), and override outputImage, where you can simply chain together built-in Core Image filters. To run your own OpenGL code, you wrap it in a CIKernel, stored in a .cikernel file, and apply that kernel in outputImage instead.

A custom Core Image filter on iOS is then used much like a built-in one, for example:

CIFilter *prewittKernel = [CIFilter filterWithName:@"PrewittKernel"];

CIImage *result = [CIFilter filterWithName:@"PrewittKernel" keysAndValues:kCIInputImageKey, self.inputImage, nil].outputImage;
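One caveat, not stated in this answer: +filterWithName: only returns filters that Core Image knows about, and whether it finds an unregistered subclass can depend on the OS version. To be explicit, the subclass can be registered through a CIFilterConstructor (iOS 9 and later), or simply instantiated directly with alloc/init. A hedged sketch of the registration approach (PrewittKernelConstructor is my own illustrative name):

#import <CoreImage/CoreImage.h>
#import "PrewittKernel.h" // the CIFilter subclass shown below

// Sketch: a constructor object that lets +[CIFilter filterWithName:] find the
// custom subclass. Requires the registration API (iOS 9+); alternatively, just
// instantiate the subclass directly.
@interface PrewittKernelConstructor : NSObject <CIFilterConstructor>
@end

@implementation PrewittKernelConstructor

- (CIFilter *)filterWithName:(NSString *)name
{
    if ([name isEqualToString:@"PrewittKernel"]) {
        return [[PrewittKernel alloc] init];
    }
    return nil;
}

@end

// Somewhere during app startup:
// [CIFilter registerFilterName:@"PrewittKernel"
//                  constructor:[PrewittKernelConstructor new]
//              classAttributes:@{kCIAttributeFilterDisplayName : @"Prewitt Kernel",
//                                kCIAttributeFilterCategories  : @[kCICategoryStillImage]}];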

Here is a filter that uses OpenGL to apply the Prewitt operator (an edge-detection convolution) to an image; first the Cocoa Touch class (a CIFilter subclass), then the CIKernel file (the OpenGL ES 3.0 code):

The header file:

//
//  PrewittKernel.h
//  Photo Filter
//
//  Created by James Alan Bush on 5/23/15.
//
//

#import <CoreImage/CoreImage.h>

@interface PrewittKernel : CIFilter
{
    CIImage *inputImage;
}

@property (retain, nonatomic) CIImage *inputImage;

@end

The implementation file:

//
//  PrewittKernel.m
//  Photo Filter
//
//  Created by James Alan Bush on 5/23/15.
//
//

#import "PrewittKernel.h"


@implementation PrewittKernel

@synthesize inputImage;

- (CIKernel *)prewittKernel
{
    static CIKernel *kernelPrewitt = nil;

    // Load and compile the kernel source once; reuse the compiled kernel after that.
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        NSBundle *bundle = [NSBundle bundleForClass:NSClassFromString(@"PrewittKernel")];
        NSError  *error  = nil;
        NSString *code   = [NSString stringWithContentsOfFile:[bundle pathForResource:@"PrewittKernel" ofType:@"cikernel"] encoding:NSUTF8StringEncoding error:&error];
        kernelPrewitt = [CIKernel kernelWithString:code];
    });

    return kernelPrewitt;
}

- (CIImage *)outputImage
{
    CIImage *result = self.inputImage;
    return [[self prewittKernel] applyWithExtent:result.extent roiCallback:^CGRect(int index, CGRect rect) {
        return CGRectMake(0, 0, CGRectGetWidth(result.extent), CGRectGetHeight(result.extent));
    } arguments:@[result]];
}

@end
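If you would rather not register anything, the subclass can also be instantiated directly and its output rendered through a CIContext. A minimal usage sketch (the ApplyPrewitt helper below is just for illustration and assumes UIKit):

#import <UIKit/UIKit.h>
#import "PrewittKernel.h"

// Sketch: apply the PrewittKernel filter to a UIImage by instantiating the
// subclass directly and rendering the result through a CIContext.
UIImage *ApplyPrewitt(UIImage *source)
{
    CIImage *inputImage = [CIImage imageWithCGImage:source.CGImage];

    PrewittKernel *prewitt = [[PrewittKernel alloc] init];
    prewitt.inputImage = inputImage;

    CIContext *context = [CIContext contextWithOptions:nil];
    CGImageRef cgImage = [context createCGImage:prewitt.outputImage
                                       fromRect:prewitt.outputImage.extent];
    UIImage *result = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    return result;
}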

The CIKernel file (OpenGL ES 3.0):

/* PrewittKernel.cikernel */

kernel vec4 prewittKernel(sampler image)
{
    vec2 xy = destCoord();
    vec4 bottomLeftIntensity = sample(image, samplerTransform(image, xy + vec2(-1, -1)));
    vec4 topRightIntensity = sample(image, samplerTransform(image, xy + vec2(+1, +1)));
    vec4 topLeftIntensity = sample(image, samplerTransform(image, xy + vec2(+1, -1)));
    vec4 bottomRightIntensity = sample(image, samplerTransform(image, xy + vec2(-1, +1)));
    vec4 leftIntensity = sample(image, samplerTransform(image, xy + vec2(-1, 0)));
    vec4 rightIntensity = sample(image, samplerTransform(image, xy + vec2(+1, 0)));
    vec4 bottomIntensity = sample(image, samplerTransform(image, xy + vec2(0, -1)));
    vec4 topIntensity = sample(image, samplerTransform(image, xy + vec2(0, +1)));
    vec4 h = vec4(-topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity);
    vec4 v = vec4(-bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity);
    float h_max = max(h.r, max(h.g, h.b));
    float v_max = max(v.r, max(v.g, v.b));
    float mag = length(vec2(h_max, v_max)) * 1.0;

    return vec4(vec3(mag), 1.0);
}

And here is a filter that works entirely by chaining built-in Core Image filters, with no Core Image kernel (OpenGL) code at all: it blurs the image and then takes the difference between the blurred copy and the original. It also shows how to declare and use a custom attribute, in this case the gaussian blur radius:

The header file:

//
//  GaussianKernel.h
//  Chroma
//
//  Created by James Alan Bush on 7/12/15.
//  Copyright © 2015 James Alan Bush. All rights reserved.
//

#import <CoreImage/CoreImage.h>

@interface GaussianKernel : CIFilter
{
    CIImage *inputImage;
    NSNumber *inputRadius;
}

@property (retain, nonatomic) CIImage *inputImage;
@property (retain, nonatomic) NSNumber *inputRadius;

@end

The implementation file:

//
//  GaussianKernel.m
//  Chroma
//
//  Created by James Alan Bush on 7/12/15.
//  Copyright © 2015 James Alan Bush. All rights reserved.
//

#import "GaussianKernel.h"

@implementation GaussianKernel

@synthesize inputImage;
@synthesize inputRadius;

+ (NSDictionary *)customAttributes
{
    return @{
             @"inputRadius" :
                 @{
                     kCIAttributeMin       : @3.0,
                     kCIAttributeMax       : @15.0,
                     kCIAttributeDefault   : @7.5,
                     kCIAttributeType      : kCIAttributeTypeScalar
                     }
             };
}

- (void)setDefaults
{
    self.inputRadius = @7.5;
}

- (CIImage *)outputImage
{
    CIImage *result = self.inputImage;

    // GlobalCIImage is an app-specific singleton (not shown here) that holds the
    // full-size source CIImage; its extent is used for the crop rectangle.
    CGRect rect = [[GlobalCIImage sharedSingleton].ciImage extent];
    rect.origin = CGPointZero;
    CGRect cropRectLeft = CGRectMake(0, 0, rect.size.width, rect.size.height);
    CIVector *cropRect = [CIVector vectorWithX:rect.origin.x Y:rect.origin.y Z:rect.size.width W:rect.size.height];

    // Blur the input, then crop the result back to the original extent
    // (CIGaussianBlur expands the image's extent by the blur radius).
    result = [[CIFilter filterWithName:@"CIGaussianBlur" keysAndValues:kCIInputImageKey, result, @"inputRadius", [NSNumber numberWithFloat:inputRadius.floatValue], nil].outputImage imageByCroppingToRect:cropRectLeft];

    result = [CIFilter filterWithName:@"CICrop" keysAndValues:@"inputImage", result, @"inputRectangle", cropRect, nil].outputImage;

    // Difference the blurred copy against the original input image (differencing
    // the blurred image with itself would just produce black).
    result = [CIFilter filterWithName:@"CIDifferenceBlendMode" keysAndValues:kCIInputImageKey, result, kCIInputBackgroundImageKey, self.inputImage, nil].outputImage;

    return result;
}

@end
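Setting the custom inputRadius attribute works the same way as any other Core Image parameter, through key-value coding. A short sketch (someCIImage is assumed to exist; the value stays within the declared 3.0 to 15.0 range):

#import "GaussianKernel.h"

// Sketch: use the GaussianKernel filter and its custom inputRadius attribute.
GaussianKernel *blurDifference = [[GaussianKernel alloc] init];
[blurDifference setDefaults];                           // inputRadius = 7.5
blurDifference.inputImage = someCIImage;                // assumed to exist
[blurDifference setValue:@10.0 forKey:@"inputRadius"];  // override via KVC

CIImage *output = blurDifference.outputImage;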
0
