MNC Identifier is a service that uses AI to identify and verify consumers. This SDK has 2 main features:
- Face Identifier (1.1.11) — for face identification
- OCR Identifier (1.1.11) — for optical character recognition
Add the following to your Podfile:
#This is for Face Identifier
pod 'MNCIdentifier/Face', '1.1.11'
pod 'GoogleMLKit/FaceDetection', '4.0.0'
#This is for OCR Identifier
pod 'MNCIdentifier/OCR', '1.1.11'
pod 'GoogleMLKit/TextRecognition', '4.0.0'
pod 'GoogleMLKit/ObjectDetection', '4.0.0'
In Objective-C
#import <MNCFaceIdentifier/MNCFaceIdentifierClient.h>
#import <MNCFaceIdentifier/MNCFaceIdentifierDelegate.h>

@interface ViewController () <MNCFaceIdentifierDelegate>

// Keep a strong reference to the client so it survives past viewDidLoad
// and is reachable from the button action below.
@property (nonatomic, strong) MNCFaceIdentifierClient *client;

@end

@implementation ViewController

- (void)viewDidLoad {
.......
    // NOTE: the client must be an object pointer (`*`) and must be stored
    // in a property/ivar — a local variable would be unreachable from
    // -buttonTapped: and deallocated when viewDidLoad returns.
    self.client = [MNCFaceIdentifierClient new];
    self.client.delegate = self;

    // Customize the detection sequence.
    NSArray<NSNumber *> *steps = @[@(BLINK), @(SMILE), @(HOLD_STILL), @(SHAKE_HEAD)];
    self.client.sequenceOfSteps = steps;
}

- (void)buttonTapped:(UIButton *)sender {
    [self.client showFaceIdentifier:self];
}

- (void)faceIdentifierResult:(MNCFaceIdentifierResult *)result {
    // Delegate callback with the Face Identifier result, which includes
    // the captured image from the face-identification process as a UIImage.
}

@end
In Swift
import MNCFaceIdentifier

class ViewController: UIViewController, MNCFaceIdentifierDelegate {

    // Stored property so the client survives past viewDidLoad and is
    // reachable from the button action below. (A `let` local inside
    // viewDidLoad would be out of scope in buttonTapped and not compile.)
    private let client = MNCFaceIdentifierClient()

    override func viewDidLoad() {
.......
        client.delegate = self

        // Customize the detection sequence.
        let steps: [SequenceStep] = [
            .BLINK,
            .SMILE,
            .HOLD_STILL,
            .SHAKE_HEAD
        ]
        client.sequenceOfSteps = steps.map { NSNumber(value: $0.rawValue) }
    }

    @IBAction func buttonTapped(_ sender: UIButton) {
        client.showFaceIdentifier(self)
    }

    func faceIdentifierResult(_ result: MNCFaceIdentifierResult!) {
        // Delegate callback with the Face Identifier result, which includes
        // the captured image from the face-identification process as a UIImage.
    }
}
In Objective-C
#import <MNCOCRIdentifier/MNCOCRIdentifierClient.h>
#import <MNCOCRIdentifier/MNCOCRIdentifierDelegate.h>

@interface ViewController () <MNCOCRIdentifierDelegate>

// Keep a strong reference to the client so it survives past viewDidLoad
// and is reachable from the button action below.
@property (nonatomic, strong) MNCOCRIdentifierClient *client;

@end

@implementation ViewController

- (void)viewDidLoad {
.......
    // NOTE: store the client in a property/ivar — a local variable would be
    // unreachable from -buttonTapped: and deallocated when viewDidLoad returns.
    self.client = [MNCOCRIdentifierClient new];
    self.client.delegate = self;
    self.client.isFlashEnable = NO;
    self.client.isCameraOnly = NO; // enables the camera-only mode of MNCOCRIdentifier
}

- (void)buttonTapped:(UIButton *)sender {
    [self.client showOCRIdentifier:self];
}

- (void)ocrResult:(MNCOCRIdentifierResult *)result {
    // Delegate callback with the OCR result: the KTP image path and KTP data.
}

@end
In Swift
import MNCOCRIdentifier

class ViewController: UIViewController, MNCOCRIdentifierDelegate {

    // Stored property so the client survives past viewDidLoad and is
    // reachable from the button action below. (A `let` local inside
    // viewDidLoad would be out of scope in buttonTapped and not compile.)
    private let client = MNCOCRIdentifierClient()

    override func viewDidLoad() {
.......
        client.delegate = self
        client.isFlashEnable = true
        client.isCameraOnly = true // enables the camera-only mode of MNCOCRIdentifier
    }

    @IBAction func buttonTapped(_ sender: UIButton) {
        client.showOCRIdentifier(self)
    }

    func ocrResult(_ result: MNCOCRIdentifierResult?) {
        // Delegate callback with the OCR result: the KTP image path and KTP data.
    }
}