File: ViewController.swift

Package: opencv 4.10.0+dfsg-5

//
//  ViewController.swift
//
//  Created by Giles Payne on 2020/03/02.
//

import UIKit
import OpenCV

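// Helpers for rotating a Rect by 90 degrees within its parent frame, used to
// map detections found in a rotated image back to the original orientation.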
extension Rect {
    func rotateClockwise(parentHeight: Int32) {
        let tmpX = self.x
        self.x = parentHeight - (self.y + self.height)
        self.y = tmpX
        swapDims()
    }

    func rotateCounterclockwise(parentWidth: Int32) {
        let tmpY = self.y
        self.y = parentWidth - (self.x + self.width)
        self.x = tmpY
        swapDims()
    }

    func swapDims() {
        let tmpWidth = self.width
        self.width = self.height
        self.height = tmpWidth
    }
}

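// Receives camera frames via CvVideoCameraDelegate2, runs LBP cascade face
// detection on each frame, and draws a rectangle around every detected face.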
class ViewController: UIViewController, CvVideoCameraDelegate2 {

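    // Face detectors loaded from the bundled LBP frontal-face cascade. Only the
    // Swift CascadeClassifier is used below; the native DetectionBasedTracker is
    // exercised by the commented-out code in processImage.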
    let swiftDetector = CascadeClassifier(filename: Bundle(for: ViewController.self).path(forResource:"lbpcascade_frontalface", ofType:"xml")!)
    let nativeDetector = DetectionBasedTracker(cascadeName: Bundle(for: ViewController.self).path(forResource:"lbpcascade_frontalface", ofType:"xml")!, minFaceSize: 0)
    var rgba: Mat? = nil
    var gray: Mat = Mat()
    var relativeFaceSize: Float = 0.2
    var absoluteFaceSize: Int32 = 0
    let FACE_RECT_COLOR = Scalar(0.0, 255.0, 0.0, 255.0)
    let FACE_RECT_THICKNESS: Int32 = 4

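    // CvVideoCameraDelegate2 callback, invoked once per camera frame.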
    func processImage(_ image: Mat!) {
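        // Rotate landscape frames upright so the detector always sees an upright image.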
        let orientation = UIDevice.current.orientation
        switch orientation {
        case .landscapeLeft:
            rgba = Mat()
            Core.rotate(src: image, dst: rgba!, rotateCode: .ROTATE_90_COUNTERCLOCKWISE)
        case .landscapeRight:
            rgba = Mat()
            Core.rotate(src: image, dst: rgba!, rotateCode: .ROTATE_90_CLOCKWISE)
        default:
            rgba = image
        }

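        // The cascade classifier works on a single-channel grayscale image.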
        Imgproc.cvtColor(src: rgba!, dst: gray, code: .COLOR_RGB2GRAY)

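        // On the first frame, derive the minimum face size in pixels from the relative size.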
        if (absoluteFaceSize == 0) {
            let height = gray.rows()
            if (round(Float(height) * relativeFaceSize) > 0) {
                absoluteFaceSize = Int32(round(Float(height) * relativeFaceSize))
            }
        }

        var faces = [Rect]()

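        // Run the Swift cascade classifier; the native DetectionBasedTracker path below is kept for reference.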
        swiftDetector.detectMultiScale(image: gray, objects: &faces, scaleFactor: 1.1, minNeighbors: Int32(2), flags: Int32(2), minSize: Size(width: absoluteFaceSize, height: absoluteFaceSize), maxSize: Size())
        //let facesArray = NSMutableArray()
        //nativeDetector!.detect(gray, faces: facesArray)
        //faces.append(contentsOf: facesArray)

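        // Map each detection back to the original frame orientation and draw its bounding box.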
        for face in faces {
            if orientation == .landscapeLeft {
                face.rotateClockwise(parentHeight: gray.rows())
            } else if orientation == .landscapeRight {
                face.rotateCounterclockwise(parentWidth: gray.cols())
            }
            Imgproc.rectangle(img: image, pt1: face.tl(), pt2: face.br(), color: FACE_RECT_COLOR, thickness: FACE_RECT_THICKNESS)
        }
    }

    var camera: CvVideoCamera2? = nil

    @IBOutlet weak var cameraHolder: UIView!
    override func viewDidLoad() {
        super.viewDidLoad()
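        // Attach the OpenCV camera to the holder view and start delivering frames to processImage.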
        camera = CvVideoCamera2(parentView: cameraHolder)
        camera?.rotateVideo = true
        camera?.delegate = self
        camera?.start()
    }
}