I am using the iOS 11 Vision framework to get real-time face landmark points, and I can overlay a UIBezierPath for the landmarks on the camera layer. However, I would like to get something like the bottom-right picture. I currently have something similar to the left image, and I tried iterating over the points and adding midpoints, but I don't know how to generate all the triangles from the points. How can I create a triangle mesh like the one on the right from the dots on the left?
I'm not sure it is possible with just the points that I have, and it may not help much, but I also have the bounding box of the whole face. Finally, if there is any framework that would let me detect all the points I need — such as OpenCV or something else — please let me know. Thanks!

Here is the code I used from https://github.com/DroidsOnRoids/VisionFaceDetection :
/// Runs the Vision face-landmarks request on `image` and, for every detected
/// face observation, converts each landmark region into view coordinates and
/// draws it on top of the camera preview.
/// - Parameter image: The camera frame to analyze.
func detectLandmarks(on image: CIImage) {
    // BUG FIX: the original used `try?`, silently swallowing detection errors.
    do {
        try faceLandmarksDetectionRequest.perform([faceLandmarks], on: image)
    } catch {
        print("Face landmark detection failed: \(error)")
        return
    }
    guard let landmarksResults = faceLandmarks.results as? [VNFaceObservation] else { return }
    for observation in landmarksResults {
        DispatchQueue.main.async {
            guard let boundingBox = self.faceLandmarks.inputFaceObservations?.first?.boundingBox else { return }
            // Landmark points are normalized to the face bounding box, so scale
            // the box into the view's coordinate space first.
            let faceBoundingBox = boundingBox.scaled(to: self.view.bounds.size)
            // The eleven copy-pasted stanzas of the original collapse into a
            // single loop over every landmark region of interest.
            let regions: [VNFaceLandmarkRegion2D?] = [
                observation.landmarks?.faceContour,
                observation.landmarks?.leftEye,
                observation.landmarks?.rightEye,
                observation.landmarks?.leftPupil,
                observation.landmarks?.rightPupil,
                observation.landmarks?.nose,
                observation.landmarks?.innerLips,
                observation.landmarks?.leftEyebrow,
                observation.landmarks?.rightEyebrow,
                observation.landmarks?.noseCrest,
                observation.landmarks?.outerLips
            ]
            for region in regions {
                self.convertPointsForFace(region, faceBoundingBox)
            }
        }
    }
}

/// Maps a landmark region's normalized points into view coordinates and
/// schedules them for drawing on the main queue.
/// - Parameters:
///   - landmark: A landmark region from a `VNFaceObservation`, or `nil`.
///   - boundingBox: The face bounding box already scaled to view coordinates.
func convertPointsForFace(_ landmark: VNFaceLandmarkRegion2D?, _ boundingBox: CGRect) {
    // Skip absent or empty regions up front instead of drawing nothing later.
    guard let points = landmark?.points, let count = landmark?.pointCount, count > 0 else { return }
    // `convert(_:with:)` is a sibling helper (defined elsewhere in this file)
    // that turns the raw Vision point buffer into (x, y) tuples.
    let convertedPoints = convert(points, with: count)
    let faceLandmarkPoints = convertedPoints.map { (point: (x: CGFloat, y: CGFloat)) -> (x: CGFloat, y: CGFloat) in
        // De-normalize: the points are fractions of the face bounding box.
        (x: point.x * boundingBox.width + boundingBox.origin.x,
         y: point.y * boundingBox.height + boundingBox.origin.y)
    }
    DispatchQueue.main.async {
        self.draw(points: faceLandmarkPoints)
    }
}

/// Strokes a closed contour through `points` as a new sublayer of `shapeLayer`.
/// - Parameter points: Landmark points already in view coordinates.
func draw(points: [(x: CGFloat, y: CGFloat)]) {
    // BUG FIX: the original indexed `points[0]` without a guard (crash on an
    // empty array), looped `0..<count-1` (the last landmark point was never
    // drawn), and called `move(to:)` after every `addLine(to:)`, splitting the
    // contour into disconnected segments before jumping from the second-to-last
    // point back to the first.
    guard let first = points.first else { return }
    let newLayer = CAShapeLayer()
    newLayer.strokeColor = UIColor.blue.cgColor
    newLayer.lineWidth = 4.0
    let path = UIBezierPath()
    path.move(to: CGPoint(x: first.x, y: first.y))
    for point in points.dropFirst() {
        path.addLine(to: CGPoint(x: point.x, y: point.y))
    }
    // close() returns to the start point and joins the stroke cleanly.
    path.close()
    newLayer.path = path.cgPath
    // NOTE(review): sublayers accumulate across frames; consider removing old
    // landmark layers before adding new ones to avoid unbounded growth.
    shapeLayer.addSublayer(newLayer)
}
ios image-processing swift swift4
Ali
source share