Yes, OpenCV runs on iOS and will provide you with a good library of tools to use. You can either create your own .framework (a little tedious) or download one from the Internet.
After that you should be able to build computer vision software on iOS, but be careful: image processing can use a lot of power and memory.
OpenCV has its own C++ classes for images (cv::Mat), so you will probably need to convert back and forth to UIImage for input and display.
Here is a piece of code I use for NSImage; it should give you enough to adapt it for UIImage (there is also a rough UIImage sketch after the listing).
//
//  NSImage+OpenCV.h
//

#import <opencv2/opencv.hpp>   // needed for cv::Mat (here or in your prefix header)
#import <AppKit/AppKit.h>

@interface NSImage (NSImage_OpenCV) {

}

+(NSImage*)imageWithCVMat:(const cv::Mat&)cvMat;
-(id)initWithCVMat:(const cv::Mat&)cvMat;

@property(nonatomic, readonly) cv::Mat CVMat;
@property(nonatomic, readonly) cv::Mat CVGrayscaleMat;

@end
AND
//
//  NSImage+OpenCV.mm
//

#import "NSImage+OpenCV.h"

static void ProviderReleaseDataNOP(void *info, const void *data, size_t size)
{
    return;
}

@implementation NSImage (NSImage_OpenCV)

-(CGImageRef)CGImage
{
    CGContextRef bitmapCtx = CGBitmapContextCreate(NULL /*data - pass NULL to let CG allocate the memory*/,
                                                   [self size].width,
                                                   [self size].height,
                                                   8 /*bitsPerComponent*/,
                                                   0 /*bytesPerRow - CG will calculate it for you if it is allocating the data. This might get padded out a bit for better alignment*/,
                                                   [[NSColorSpace genericRGBColorSpace] CGColorSpace],
                                                   kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);

    [NSGraphicsContext saveGraphicsState];
    [NSGraphicsContext setCurrentContext:[NSGraphicsContext graphicsContextWithGraphicsPort:bitmapCtx flipped:NO]];
    [self drawInRect:NSMakeRect(0, 0, [self size].width, [self size].height) fromRect:NSZeroRect operation:NSCompositeCopy fraction:1.0];
    [NSGraphicsContext restoreGraphicsState];

    CGImageRef cgImage = CGBitmapContextCreateImage(bitmapCtx);
    CGContextRelease(bitmapCtx);

    return cgImage;
}

-(cv::Mat)CVMat
{
    CGImageRef imageRef = [self CGImage];
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(imageRef);
    CGFloat cols = self.size.width;
    CGFloat rows = self.size.height;
    cv::Mat cvMat(rows, cols, CV_8UC4); // 8 bits per component, 4 channels

    CGContextRef contextRef = CGBitmapContextCreate(cvMat.data,    // Pointer to backing data
                                                    cols,          // Width of bitmap
                                                    rows,          // Height of bitmap
                                                    8,             // Bits per component
                                                    cvMat.step[0], // Bytes per row
                                                    colorSpace,    // Colorspace
                                                    kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault); // Bitmap info flags

    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), imageRef);
    CGContextRelease(contextRef);
    CGImageRelease(imageRef);

    return cvMat;
}

-(cv::Mat)CVGrayscaleMat
{
    CGImageRef imageRef = [self CGImage];
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
    CGFloat cols = self.size.width;
    CGFloat rows = self.size.height;
    cv::Mat cvMat = cv::Mat(rows, cols, CV_8UC1); // 8 bits per component, 1 channel

    CGContextRef contextRef = CGBitmapContextCreate(cvMat.data,    // Pointer to backing data
                                                    cols,          // Width of bitmap
                                                    rows,          // Height of bitmap
                                                    8,             // Bits per component
                                                    cvMat.step[0], // Bytes per row
                                                    colorSpace,    // Colorspace
                                                    kCGImageAlphaNone | kCGBitmapByteOrderDefault); // Bitmap info flags

    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), imageRef);
    CGContextRelease(contextRef);
    CGColorSpaceRelease(colorSpace);
    CGImageRelease(imageRef);

    return cvMat;
}

+ (NSImage *)imageWithCVMat:(const cv::Mat&)cvMat
{
    return [[[NSImage alloc] initWithCVMat:cvMat] autorelease];
}

- (id)initWithCVMat:(const cv::Mat&)cvMat
{
    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize() * cvMat.total()];

    CGColorSpaceRef colorSpace;

    if (cvMat.elemSize() == 1)
    {
        colorSpace = CGColorSpaceCreateDeviceGray();
    }
    else
    {
        colorSpace = CGColorSpaceCreateDeviceRGB();
    }

    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    CGImageRef imageRef = CGImageCreate(cvMat.cols,           // Width
                                        cvMat.rows,           // Height
                                        8,                    // Bits per component
                                        8 * cvMat.elemSize(), // Bits per pixel
                                        cvMat.step[0],        // Bytes per row
                                        colorSpace,           // Colorspace
                                        kCGImageAlphaNone | kCGBitmapByteOrderDefault, // Bitmap info flags
                                        provider,             // CGDataProviderRef
                                        NULL,                 // Decode
                                        false,                // Should interpolate
                                        kCGRenderingIntentDefault); // Intent

    NSBitmapImageRep *bitmapRep = [[NSBitmapImageRep alloc] initWithCGImage:imageRef];

    NSImage *image = [[NSImage alloc] init];
    [image addRepresentation:bitmapRep];

    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    return image;
}

@end
(source)
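For iOS specifically, a rough UIImage equivalent of the two conversions above could look like the sketch below. The UIImage (OpenCV) category name and method names are just placeholders of mine, and it assumes ARC; recent OpenCV releases also ship ready-made UIImageToMat()/MatToUIImage() helpers in <opencv2/imgcodecs/ios.h> that do essentially the same job.

//
//  UIImage+OpenCV.mm  (sketch, adapted from the NSImage code above)
//

#import <opencv2/opencv.hpp>
#import <UIKit/UIKit.h>

@interface UIImage (OpenCV)
+ (UIImage *)imageWithCVMat:(const cv::Mat&)cvMat;
- (cv::Mat)CVMat;
@end

@implementation UIImage (OpenCV)

// UIImage -> cv::Mat (8 bits per component, 4 channels, RGBA order)
- (cv::Mat)CVMat
{
    CGImageRef imageRef = self.CGImage;                           // not owned, do not release
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(imageRef);  // not owned, do not release
    CGFloat cols = self.size.width;
    CGFloat rows = self.size.height;

    cv::Mat cvMat(rows, cols, CV_8UC4);
    CGContextRef contextRef = CGBitmapContextCreate(cvMat.data,    // pointer to backing data
                                                    cols, rows,
                                                    8,             // bits per component
                                                    cvMat.step[0], // bytes per row
                                                    colorSpace,
                                                    kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault);
    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), imageRef);
    CGContextRelease(contextRef);

    return cvMat;
}

// cv::Mat -> UIImage
+ (UIImage *)imageWithCVMat:(const cv::Mat&)cvMat
{
    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize() * cvMat.total()];
    CGColorSpaceRef colorSpace = (cvMat.elemSize() == 1) ? CGColorSpaceCreateDeviceGray()
                                                         : CGColorSpaceCreateDeviceRGB();
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    CGImageRef imageRef = CGImageCreate(cvMat.cols, cvMat.rows,
                                        8,                    // bits per component
                                        8 * cvMat.elemSize(), // bits per pixel
                                        cvMat.step[0],        // bytes per row
                                        colorSpace,
                                        kCGImageAlphaNone | kCGBitmapByteOrderDefault,
                                        provider, NULL, false, kCGRenderingIntentDefault);
    UIImage *image = [UIImage imageWithCGImage:imageRef];

    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    return image;
}

@end

Note that the cv::Mat you get back this way holds RGBA pixels, while many OpenCV routines assume BGR; if that matters, reorder the channels first, e.g. cv::cvtColor(mat, mat, cv::COLOR_RGBA2BGR).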