Initial commit

James Peret 7 years ago
commit
99e640f749
11 changed files with 2797 additions and 0 deletions
  1. BIN
      .DS_Store
  2. 2 0
      .gitignore
  3. 124 0
      camera_test.py
  4. 122 0
      camera_test_v3.py
  5. 132 0
      camera_test_v4.py
  6. 129 0
      face_camera_v2.py
  7. 217 0
      face_recognizer_v1.py
  8. 220 0
      face_recognizer_v2.py
  9. 224 0
      face_recognizer_v3.py
  10. 122 0
      facetracker.py
  11. 1505 0
      lbpcascade_frontalface.xml

BIN
.DS_Store


+ 2 - 0
.gitignore

@@ -0,0 +1,2 @@
1
+faces.csv
2
+photos/

+ 124 - 0
camera_test.py

@@ -0,0 +1,124 @@
1
+import Tkinter as tk
2
+import cv2, sys, time, os, math
3
+from PIL import Image, ImageTk
4
+import numpy as numpy
5
+
6
+from os import listdir
7
+from os.path import isfile, join
8
+
9
+# Load the BCM V4l2 driver for /dev/video0
10
+os.system('sudo modprobe bcm2835-v4l2')
11
+# Set the framerate ( not sure this does anything! )
12
+os.system('v4l2-ctl -p 4')
13
+
14
+width, height = 320, 240
15
+cap = cv2.VideoCapture(0)
16
+cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH,  width)
17
+cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, height)
18
+
19
+cascPath = '/home/pi/lbpcascade_frontalface.xml'
20
+faceCascade = cv2.CascadeClassifier(cascPath)
21
+
22
+root = tk.Tk()
23
+root.attributes("-fullscreen", True)
24
+root.bind('<Escape>', lambda e: root.quit())
25
+
26
+lmain = tk.Label(root)
27
+lmain.pack()
28
+
29
+last_image_faces = []
30
+
31
def show_frame():
    """Grab one camera frame, mirror it, run face detection, display it, and reschedule."""
    _, raw = cap.read()
    mirrored = cv2.flip(raw, 1)
    annotated = faceDetect(mirrored)
    rgba = cv2.cvtColor(annotated, cv2.COLOR_BGR2RGBA)
    photo = ImageTk.PhotoImage(image=Image.fromarray(rgba))
    lmain.imgtk = photo  # keep a reference so Tk doesn't garbage-collect the image
    lmain.configure(image=photo)
    lmain.after(1, show_frame)
41
+
42
def faceDetect(frame):
    """Detect faces in *frame*, track the first one across frames, and box it.

    A face within 30px of a face from the previous frame is "tracked"
    (green box); otherwise it is boxed red and sent to recognizeFace().
    Only the first detected face is processed per frame.
    """
    gray = cv2.equalizeHist(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY))
    faces = faceCascade.detectMultiScale(
        gray,
        scaleFactor=1.1,
        minNeighbors=4,
        minSize=(20, 20),
        flags=cv2.cv.CV_HAAR_SCALE_IMAGE | cv2.cv.CV_HAAR_FIND_BIGGEST_OBJECT | cv2.cv.CV_HAAR_DO_ROUGH_SEARCH
    )

    print("Found {0} faces!".format(len(faces)))
    global last_image_faces
    image_faces = []

    for (x, y, w, h) in faces:
        crop = frame[y:(y + h), x:(x + w)]
        cx = x + (w / 2)
        cy = y + (h / 2)
        image_faces.append([cx, cy])
        # tracked when close (< 30px) to any face center from the previous frame
        tracking = False
        for prev in last_image_faces:
            d = math.hypot(cx - prev[0], cy - prev[1])
            print("Distance from last point " + str(d))
            if d < 30:
                tracking = True
        if tracking:
            cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
        else:
            cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 0, 255), 2)
            recognizeFace(crop)
        break  # intentionally handle only the first face per frame
    last_image_faces = image_faces
    return frame
84
+
85
def recognizeFace(face):
    """Match *face* against the in-memory face_db using ORB features.

    Saves the face as a new database record when no existing image matches.

    Fixes over the original:
      * the ORB detector, BF matcher, and the query descriptors are created
        once instead of on every loop iteration;
      * detectAndCompute can return None descriptors (no keypoints found) —
        the original then crashed inside bf.match; treat that as "no match".
    NOTE(review): matching on an RGBA-converted image and accepting *any*
    descriptor match as a hit are kept from the original — both look
    questionable, but changing them would change behavior.
    """
    print("Searching Face database...")
    face = cv2.cvtColor(face, cv2.COLOR_BGR2RGBA)
    orb = cv2.ORB()
    bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
    kp1, des1 = orb.detectAndCompute(face, None)
    count = 0
    match_found = False
    for f in face_db:
        count = count + 1
        kp2, des2 = orb.detectAndCompute(f, None)
        if des1 is None or des2 is None:
            continue  # no features on one side -> cannot match this record
        matches = bf.match(des1, des2)
        if len(matches) > 0:
            print("Match Found! (" + str(count) + ")")
            match_found = True
            break
    if not match_found:
        print("No match found! Searched " + str(count) + " records. Saving image.")
        cv2.imwrite("/home/pi/photos/faces/face-" + str(len(face_db)) + ".jpg", face)
        loadFaceDB()
111
+
112
def loadFaceDB():
    """(Re)load every file under /home/pi/photos/faces into the global face_db array."""
    global face_db
    face_db_path = '/home/pi/photos/faces'
    filenames = [f for f in listdir(face_db_path) if isfile(join(face_db_path, f))]
    face_db = numpy.empty(len(filenames), dtype=object)
    for idx in range(len(filenames)):
        face_db[idx] = cv2.imread(join(face_db_path, filenames[idx]))
120
+
121
+
122
+loadFaceDB()
123
+show_frame()
124
+root.mainloop()

+ 122 - 0
camera_test_v3.py

@@ -0,0 +1,122 @@
1
+import Tkinter as tk
2
+import cv2, sys, time, os, math
3
+from PIL import Image, ImageTk
4
+import numpy as numpy
5
+
6
+
7
+from SimpleCV import Color, np
8
+from SimpleCV import Image as SimpleImage
9
+
10
+from os import listdir
11
+from os.path import isfile, join
12
+
13
+# Load the BCM V4l2 driver for /dev/video0
14
+os.system('sudo modprobe bcm2835-v4l2')
15
+# Set the framerate ( not sure this does anything! )
16
+os.system('v4l2-ctl -p 4')
17
+
18
+width, height = 320, 240
19
+cap = cv2.VideoCapture(0)
20
+cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH,  width)
21
+cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, height)
22
+
23
+cascPath = '/home/pi/lbpcascade_frontalface.xml'
24
+faceCascade = cv2.CascadeClassifier(cascPath)
25
+
26
+root = tk.Tk()
27
+root.attributes("-fullscreen", True)
28
+root.bind('<Escape>', lambda e: root.quit())
29
+
30
+lmain = tk.Label(root)
31
+lmain.pack()
32
+
33
+last_image_faces = []
34
+
35
+def show_frame():
36
+    _, frame = cap.read()
37
+    frame = cv2.flip(frame, 1)
38
+    frame = faceDetect(frame)
39
+    cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)
40
+    img = Image.fromarray(cv2image)
41
+    imgtk = ImageTk.PhotoImage(image=img)
42
+    lmain.imgtk = imgtk
43
+    lmain.configure(image=imgtk)
44
+    lmain.after(1, show_frame)
45
+
46
+def faceDetect(frame):
47
+
48
+    # Do face detection
49
+    #faces = faceCascade.detectMultiScale(frame, 1.1, 3, 0, (10, 10))
50
+
51
+    #Slower method
52
+    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
53
+    gray = cv2.equalizeHist( gray )
54
+    faces = faceCascade.detectMultiScale(
55
+	gray,
56
+        scaleFactor=1.1,
57
+        minNeighbors=4,
58
+        minSize=(20, 20),
59
+        flags=cv2.cv.CV_HAAR_SCALE_IMAGE | cv2.cv.CV_HAAR_FIND_BIGGEST_OBJECT | cv2.cv.CV_HAAR_DO_ROUGH_SEARCH
60
+    )
61
+
62
+    print "Found {0} faces!".format(len(faces))
63
+    global last_image_faces
64
+    image_faces = []
65
+
66
+    for (x, y, w, h) in faces:
67
+        # Draw a green rectangle around the face
68
+        face = frame[y:(y+h), x:(x+w)]
69
+        center_x = x + (w/2)
70
+        center_y = y + (h/2)
71
+        center = [center_x, center_y]
72
+        image_faces.append(center)
73
+        tracking = False
74
+        for pos in last_image_faces:
75
+            #dist = sqrt( (center_x - pos[0])**2 + (center_y - pos[1])**2 )
76
+            dist = math.hypot(center_x - pos[0], center_y - pos[1])
77
+            print("Distance from last point " + str(dist))
78
+            if dist < 30:
79
+                tracking = True
80
+        if tracking == False:
81
+            cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 0, 255), 2)
82
+            recognizeFace(face)
83
+        else:
84
+            cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
85
+        break
86
+    last_image_faces = image_faces
87
+    return frame
88
+
89
def recognizeFace(face):
    """SIFT-match *face* against every face_db record; save it as new when nothing matches."""
    face = cv2.cvtColor(face, cv2.COLOR_BGR2RGBA)
    match_found = False
    count = 0
    for record in face_db:
        count = count + 1
        template = SimpleImage(record)
        cv_face = SimpleImage(face)
        print("Scaning DB face " + str(count))
        # any SIFT keypoint match within the distance threshold counts as a hit
        if cv_face.drawSIFTKeyPointMatch(template, distance=50):
            print("Match Found! (" + str(count) + ")")
            match_found = True
            break
    if not match_found:
        print("No match found! Searched " + str(count) + " records. Saving image.")
        cv2.imwrite("/home/pi/photos/faces/face-" + str(len(face_db)) + ".jpg", face)
        loadFaceDB()
109
+
110
+def loadFaceDB():
111
+    # Load faces
112
+    face_db_path='/home/pi/photos/faces'
113
+    onlyfiles = [ f for f in listdir(face_db_path) if isfile(join(face_db_path,f)) ]
114
+    global face_db
115
+    face_db = numpy.empty(len(onlyfiles), dtype=object)
116
+    for n in range(0, len(onlyfiles)):
117
+      face_db[n] = cv2.imread( join(face_db_path,onlyfiles[n]) )
118
+
119
+
120
+loadFaceDB()
121
+show_frame()
122
+root.mainloop()

+ 132 - 0
camera_test_v4.py

@@ -0,0 +1,132 @@
1
+# FACE CAMERA
2
+
3
+import Tkinter as tk
4
+import cv2, sys, time, os, math
5
+from PIL import Image, ImageTk
6
+import numpy as numpy
7
+
8
+from os import listdir
9
+from os.path import isfile, join
10
+
11
+import RPi.GPIO as GPIO
12
+
13
+# Load the BCM V4l2 driver for /dev/video0
14
+os.system('sudo modprobe bcm2835-v4l2')
15
+# Set the framerate ( not sure this does anything! )
16
+os.system('v4l2-ctl -p 4')
17
+
18
+width, height = 320, 240
19
+cap = cv2.VideoCapture(0)
20
+cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH,  width)
21
+cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, height)
22
+
23
+cascPath = '/home/pi/lbpcascade_frontalface.xml'
24
+faceCascade = cv2.CascadeClassifier(cascPath)
25
+
26
+root = tk.Tk()
27
+root.attributes("-fullscreen", True)
28
+root.bind('<Escape>', lambda e: root.quit())
29
+
30
+lmain = tk.Label(root)
31
+lmain.pack()
32
+
33
+last_image_faces = []
34
+saved = False
35
+
36
+GPIO.setmode(GPIO.BOARD)
37
+GPIO.setup(12, GPIO.IN, pull_up_down = GPIO.PUD_UP)
38
+GPIO.setup(16, GPIO.IN, pull_up_down = GPIO.PUD_UP)
39
+GPIO.setup(18, GPIO.IN, pull_up_down = GPIO.PUD_UP)
40
+
41
+
42
+
43
+def show_frame():
44
+    _, frame = cap.read()
45
+    frame = cv2.flip(frame, 1)
46
+    frame = faceDetect(frame)
47
+    #frame = cv2.resize(frame, (320,240))
48
+    buttonPress()
49
+    cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)
50
+    img = Image.fromarray(cv2image)
51
+    imgtk = ImageTk.PhotoImage(image=img)
52
+    lmain.imgtk = imgtk
53
+    lmain.configure(image=imgtk)
54
+    lmain.after(1, show_frame)
55
+
56
+def faceDetect(frame):
57
+
58
+    # Do face detection
59
+    #faces = faceCascade.detectMultiScale(frame, 1.1, 3, 0, (10, 10))
60
+
61
+    #Slower method
62
+    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
63
+    gray = cv2.equalizeHist( gray )
64
+    faces = faceCascade.detectMultiScale(
65
+	gray,
66
+        scaleFactor=1.1,
67
+        minNeighbors=4,
68
+        minSize=(20, 20),
69
+        flags=cv2.cv.CV_HAAR_SCALE_IMAGE | cv2.cv.CV_HAAR_FIND_BIGGEST_OBJECT | cv2.cv.CV_HAAR_DO_ROUGH_SEARCH
70
+    )
71
+
72
+    #print "Found {0} faces!".format(len(faces))
73
+    global last_image_faces
74
+    image_faces = []
75
+
76
+    for (x, y, w, h) in faces:
77
+        center_x = x + (w/2)
78
+        center_y = y + (h/2)
79
+        start_y = center_y - 80
80
+        start_x = center_x - 80
81
+        face_crop = frame[start_y:(start_y+160), start_x:(start_x+160)]
82
+        image_faces.append(face_crop)
83
+        # Draw a green rectangle around the face
84
+        cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
85
+    last_image_faces = image_faces
86
+    return frame
87
+
88
def saveFace():
    """Write every currently tracked face crop to the face database directory.

    BUG FIX: the original built the same filename (len(face_db)) for every
    face in the loop, so multiple faces detected in one frame overwrote
    each other on disk; each crop now gets a unique index.
    """
    global last_image_faces
    for i, face in enumerate(last_image_faces):
        face = cv2.cvtColor(face, cv2.COLOR_BGR2GRAY)
        face = cv2.equalizeHist(face)
        cv2.imwrite("/home/pi/photos/faces/face-" + str(len(face_db) + i) + ".jpg", face)
    loadFaceDB()
95
+
96
def buttonPress():
    """Poll the three capture buttons (BOARD pins 12/16/18, active-low).

    Saves the current face crops once per press; the `saved` flag debounces
    so a held button does not save on every frame.

    BUG FIX: the original checked each pin in sequence and reset `saved`
    in the `else` branch of every pin, so holding pin 12 was immediately
    "un-debounced" by the unpressed pins 16/18 and saved on every frame.
    """
    global saved
    pressed = any(GPIO.input(pin) == 0 for pin in (12, 16, 18))
    if pressed:
        if not saved:
            saveFace()
            saved = True
            print("Image Saved")
    else:
        saved = False
119
+
120
+def loadFaceDB():
121
+    # Load faces
122
+    face_db_path='/home/pi/photos/faces'
123
+    onlyfiles = [ f for f in listdir(face_db_path) if isfile(join(face_db_path,f)) ]
124
+    global face_db
125
+    face_db = numpy.empty(len(onlyfiles), dtype=object)
126
+    for n in range(0, len(onlyfiles)):
127
+      face_db[n] = cv2.imread( join(face_db_path,onlyfiles[n]) )
128
+
129
+loadFaceDB()
130
+show_frame()
131
+root.mainloop()
132
+GPIO.cleanup()

+ 129 - 0
face_camera_v2.py

@@ -0,0 +1,129 @@
1
+import Tkinter as tk
2
+import cv2, sys, time, os, math
3
+from PIL import Image, ImageTk
4
+import numpy as numpy
5
+
6
+from os import listdir
7
+from os.path import isfile, join
8
+
9
+import RPi.GPIO as GPIO
10
+
11
+# Load the BCM V4l2 driver for /dev/video0
12
+os.system('sudo modprobe bcm2835-v4l2')
13
+# Set the framerate ( not sure this does anything! )
14
+os.system('v4l2-ctl -p 4')
15
+
16
+width, height = 320, 240
17
+cap = cv2.VideoCapture(0)
18
+cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH,  width)
19
+cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, height)
20
+
21
+cascPath = '/home/pi/lbpcascade_frontalface.xml'
22
+faceCascade = cv2.CascadeClassifier(cascPath)
23
+
24
+root = tk.Tk()
25
+root.attributes("-fullscreen", True)
26
+root.bind('<Escape>', lambda e: root.quit())
27
+
28
+lmain = tk.Label(root)
29
+lmain.pack()
30
+
31
+last_image_faces = []
32
+saved = False
33
+
34
+GPIO.setmode(GPIO.BOARD)
35
+GPIO.setup(12, GPIO.IN, pull_up_down = GPIO.PUD_UP)
36
+GPIO.setup(16, GPIO.IN, pull_up_down = GPIO.PUD_UP)
37
+GPIO.setup(18, GPIO.IN, pull_up_down = GPIO.PUD_UP)
38
+
39
+
40
+
41
+def show_frame():
42
+    _, frame = cap.read()
43
+    frame = cv2.flip(frame, 1)
44
+    frame = faceDetect(frame)
45
+    #frame = cv2.resize(frame, (320,240))
46
+    buttonPress()
47
+    cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)
48
+    img = Image.fromarray(cv2image)
49
+    imgtk = ImageTk.PhotoImage(image=img)
50
+    lmain.imgtk = imgtk
51
+    lmain.configure(image=imgtk)
52
+    lmain.after(1, show_frame)
53
+
54
+def faceDetect(frame):
55
+
56
+    # Do face detection
57
+    #faces = faceCascade.detectMultiScale(frame, 1.1, 3, 0, (10, 10))
58
+
59
+    #Slower method
60
+    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
61
+    gray = cv2.equalizeHist( gray )
62
+    faces = faceCascade.detectMultiScale(
63
+	    gray,
64
+        scaleFactor=1.1,
65
+        minNeighbors=4,
66
+        minSize=(20, 20),
67
+        flags=cv2.cv.CV_HAAR_SCALE_IMAGE | cv2.cv.CV_HAAR_FIND_BIGGEST_OBJECT | cv2.cv.CV_HAAR_DO_ROUGH_SEARCH
68
+    )
69
+
70
+    #print "Found {0} faces!".format(len(faces))
71
+    global last_image_faces
72
+    image_faces = []
73
+
74
+    for (x, y, w, h) in faces:
75
+        center_x = x + (w/2)
76
+        center_y = y + (h/2)
77
+        start_y = center_y - 40
78
+        start_x = center_x - 40
79
+        face = gray[y:(y+h), x:(x+w)]
80
+        image_faces.append(face)
81
+        # Draw a green rectangle around the face
82
+        cv2.rectangle(frame, (x, y), (start_x+w, start_y+h), (0, 255, 0), 2)
83
+    last_image_faces = image_faces
84
+    return frame
85
+
86
def saveFace():
    """Resize, equalize, and persist each tracked (grayscale) face crop.

    BUG FIX: the original reused len(face_db)+1 as the filename index for
    every face in the loop, overwriting earlier crops in the same frame,
    and the "Saved image" message printed a different index than the one
    written.  Each crop now gets a unique index and the message matches it.
    """
    global last_image_faces
    for i, face in enumerate(last_image_faces):
        face = cv2.resize(face, (120, 120))
        face = cv2.equalizeHist(face)
        index = len(face_db) + 1 + i
        cv2.imwrite("/home/pi/photos/faces/face-" + str(index) + ".jpg", face)
        print("Saved image " + str(index))
    loadFaceDB()
94
+
95
def buttonPress():
    """Poll the three capture buttons (BOARD pins 12/16/18, active-low) and save once per press.

    BUG FIX: the original reset the `saved` debounce flag in the `else`
    branch of every pin check, so the unpressed pins cancelled the
    debounce of a held pin and the face was saved on every frame.
    """
    global saved
    pressed = any(GPIO.input(pin) == 0 for pin in (12, 16, 18))
    if pressed:
        if not saved:
            saveFace()
            saved = True
    else:
        saved = False
116
+
117
+def loadFaceDB():
118
+    # Load faces
119
+    face_db_path='/home/pi/photos/faces'
120
+    onlyfiles = [ f for f in listdir(face_db_path) if isfile(join(face_db_path,f)) ]
121
+    global face_db
122
+    face_db = numpy.empty(len(onlyfiles), dtype=object)
123
+    for n in range(0, len(onlyfiles)):
124
+      face_db[n] = cv2.imread( join(face_db_path,onlyfiles[n]) )
125
+
126
+loadFaceDB()
127
+show_frame()
128
+root.mainloop()
129
+GPIO.cleanup()

+ 217 - 0
face_recognizer_v1.py

@@ -0,0 +1,217 @@
1
+#FACE RECOGNIZER
2
+
3
+import Tkinter as tk
4
+import cv2, sys, time, os, math
5
+from PIL import Image, ImageTk
6
+import numpy as numpy
7
+import pprint
8
+import random
9
+import math
10
+
11
+from os import listdir
12
+from os.path import isfile, join
13
+
14
+# Load the BCM V4l2 driver for /dev/video0
15
+os.system('sudo modprobe bcm2835-v4l2')
16
+# Set the framerate ( not sure this does anything! )
17
+os.system('v4l2-ctl -p 4')
18
+
19
+width, height = 320, 240
20
+cap = cv2.VideoCapture(0)
21
+cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH,  width)
22
+cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, height)
23
+
24
+cascPath = '/home/pi/lbpcascade_frontalface.xml'
25
+faceCascade = cv2.CascadeClassifier(cascPath)
26
+
27
+root = tk.Tk()
28
+root.attributes("-fullscreen", True)
29
+root.bind('<Escape>', lambda e: root.quit())
30
+
31
+lmain = tk.Label(root)
32
+lmain.pack()
33
+
34
+last_image_faces = []
35
+users = []
36
+
37
+font = cv2.FONT_HERSHEY_COMPLEX_SMALL
38
+
39
+
40
+
41
+def show_frame():
42
+    _, frame = cap.read()
43
+    frame = cv2.flip(frame, 1)
44
+    frame = faceDetect(frame)
45
+    cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)
46
+    img = Image.fromarray(cv2image)
47
+    imgtk = ImageTk.PhotoImage(image=img)
48
+    lmain.imgtk = imgtk
49
+    lmain.configure(image=imgtk)
50
+    lmain.after(1, show_frame)
51
+
52
+def faceDetect(frame):
53
+
54
+    # Do face detection
55
+    #faces = faceCascade.detectMultiScale(frame, 1.1, 3, 0, (10, 10))
56
+
57
+    #Slower method
58
+    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
59
+    gray = cv2.equalizeHist( gray )
60
+    faces = faceCascade.detectMultiScale(
61
+	    gray,
62
+        scaleFactor=1.1,
63
+        minNeighbors=4,
64
+        minSize=(20, 20),
65
+        flags=cv2.cv.CV_HAAR_SCALE_IMAGE | cv2.cv.CV_HAAR_FIND_BIGGEST_OBJECT | cv2.cv.CV_HAAR_DO_ROUGH_SEARCH
66
+    )
67
+
68
+    print "Found {0} faces!".format(len(faces))
69
+    global last_image_faces
70
+    image_faces = []
71
+
72
+    for (x, y, w, h) in faces:
73
+        counter = 1
74
+        center_x = x + (w/2)
75
+        center_y = y + (h/2)
76
+        start_y = center_y - 40
77
+        start_x = center_x - 40
78
+        if len(last_image_faces) > 0:
79
+            pos = last_image_faces[0]
80
+            last_image_faces.remove(pos)
81
+            dist = math.hypot(center_x - pos[0], center_y - pos[1])
82
+            if dist < 30:
83
+
84
+                # Info = [center_x, center_y, time_since_last_check, user, score]
85
+                center = [center_x, center_y, pos[2] + 1]
86
+                print("Tracking face " + str(counter))
87
+                counter = counter + 1
88
+                if center[2] > 6:
89
+                    if start_x > 0 and start_y > 0:
90
+                        face_crop = frame[y:(y+h), x:(x+w)]
91
+                        info = recognizeFace(face_crop)
92
+                        center[2] = 1
93
+                        center.append(info[0])
94
+                        center.append(info[1])
95
+                image_faces.append(center)
96
+                cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 255, 255), 2)
97
+                if len(pos) > 3:
98
+                    center.append(pos[3])
99
+                    center.append(pos[4])
100
+                    if pos[4] < 2000:
101
+                        cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 0, 255), 2)
102
+                        cv2.putText(frame, "%.1f" % (center[4]/1000), ((x + w - 38), (y + 17)), font, 1, (0,0,255), 1, 1)
103
+                    else:
104
+                        cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
105
+                        cv2.putText(frame, users[center[3]], (x, (y + h + 15)), font, 1, (0,255,0), 1, 1)
106
+                        cv2.putText(frame, "%.1f" % (center[4]/1000), ((x + w - 38), (y + 17)), font, 1, (0,255,0), 1, 1)
107
+            else:
108
+                center = [center_x, center_y, 1]
109
+                image_faces.append(center)
110
+                cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 255, 255), 2)
111
+        else:
112
+            center = [center_x, center_y, 1]
113
+            image_faces.append(center)
114
+            cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 255, 255), 2)
115
+    print("Number of faces detected " + str(len(last_image_faces)))
116
+    last_image_faces = image_faces
117
+    return frame
118
+
119
+def recognizeFace(face):
120
+    print("Searching Face database...")
121
+    match_found = False
122
+    face = cv2.resize(face, (120, 120))
123
+    face = cv2.cvtColor(face, cv2.cv.CV_BGR2GRAY)
124
+    face = cv2.equalizeHist( face )
125
+    cv2.imwrite("/home/pi/photos/faces/face-" + str(len(face_db) + 1) + ".jpg", face)
126
+    loadFaceDB()
127
+    predicted_label = predict_image_from_model(model, face)
128
+    print 'Predicted: %(predicted)s  ' %  {"predicted": users[predicted_label[0]]}
129
+    print predicted_label[1]
130
+    return predicted_label
131
+
132
+def loadFaceDB():
133
+    # Load faces
134
+    face_db_path='/home/pi/photos/faces'
135
+    onlyfiles = [ f for f in listdir(face_db_path) if isfile(join(face_db_path,f)) ]
136
+    global face_db
137
+    face_db = numpy.empty(len(onlyfiles), dtype=object)
138
+    for n in range(0, len(onlyfiles)):
139
+      face_db[n] = cv2.imread( join(face_db_path,onlyfiles[n]) )
140
+
141
+# Face Recognition
142
+
143
+def create_and_train_model_from_dict(label_matrix):
144
+    """ Create eigenface model from dict of labels and images """
145
+    model = cv2.createEigenFaceRecognizer()
146
+    model.train(label_matrix.values(), numpy.array(label_matrix.keys()))
147
+    return model
148
+
149
+def predict_image_from_model(model, image):
150
+    """ Given an eigenface model, predict the label of an image"""
151
+    return model.predict(image)
152
+
153
+def read_csv(filename='/home/pi/faces.csv'):
154
+    """ Read a csv file """
155
+    csv = open(filename, 'r')
156
+    return csv
157
+
158
+def prepare_training_testing_data(file):
159
+    """ prepare testing and training data from file"""
160
+    lines = file.readlines()
161
+    training_data, testing_data = split_test_training_data(lines)
162
+    return training_data
163
+
164
def create_label_matrix_dict(input_file):
    """Build {int_label: image_matrix} from an iterable of 'filename;label' csv lines.

    Note: the original tried to append additional images for an existing
    label via numpy.append, but looked the key up with the *string* label
    (keys are ints) and discarded numpy.append's return value, so only the
    first image per label was ever kept.  That effective behavior is
    preserved here, with the dead code removed.
    """
    label_dict = {}
    for line in input_file:
        print(line)
        # each csv line is "filename;label"
        filename, label = line.strip().split(';')
        key = int(label)
        if key not in label_dict:
            label_dict[key] = read_matrix_from_file(filename)
        # else: keep the first image seen for this label (see docstring)
    return label_dict
182
+
183
def split_test_training_data(data, ratio=0.2):
    """Shuffle *data* in place and split it into (training, testing) lists.

    *ratio* is the fraction of items (floored) assigned to the test set.
    """
    n_test = int(math.floor(ratio * len(data)))
    random.shuffle(data)
    training = data[n_test:]
    testing = data[:n_test]
    return training, testing
188
+
189
+def read_matrix_from_file(filename):
190
+    """ read in grayscale version of image from file """
191
+    return cv2.imread(filename, cv2.CV_LOAD_IMAGE_GRAYSCALE)
192
+
193
def create_cvs():
    """Rebuild the face-recognition csv (one "abs_path;label" line per image).

    Also fills the global `users` list with one subdirectory name per label.

    BUG FIX: the csv was written to a relative "faces.csv", while
    read_csv() reads '/home/pi/faces.csv' — depending on the working
    directory the model could train on stale data.  Write to the absolute
    path read_csv() actually uses (as face_recognizer_v2 later does).
    """
    BASE_PATH = "/home/pi/photos/recognized_faces"
    SEPARATOR = ";"
    CSV_PATH = "/home/pi/faces.csv"
    label = 0
    open(CSV_PATH, 'w').close()  # truncate any previous csv
    with open(CSV_PATH, "a") as myfile:
        for dirname, dirnames, filenames in os.walk(BASE_PATH):
            for subdirname in dirnames:
                users.append(subdirname)
                subject_path = os.path.join(dirname, subdirname)
                for filename in os.listdir(subject_path):
                    abs_path = "%s/%s" % (subject_path, filename)
                    myfile.write("%s%s%d\n" % (abs_path, SEPARATOR, label))
                label = label + 1
207
+
208
+
209
+# Face Recognition vars
210
+create_cvs()
211
+training_data = prepare_training_testing_data(read_csv())
212
+data_dict = create_label_matrix_dict(training_data)
213
+model = create_and_train_model_from_dict(data_dict)
214
+
215
+loadFaceDB()
216
+show_frame()
217
+root.mainloop()

+ 220 - 0
face_recognizer_v2.py

@@ -0,0 +1,220 @@
1
+#FACE RECOGNIZER
2
+
3
+import Tkinter as tk
4
+import cv2, sys, time, os, math
5
+from PIL import Image, ImageTk
6
+import numpy as numpy
7
+import pprint
8
+import random
9
+import math
10
+from os import listdir
11
+from os.path import isfile, join
12
+import zerorpc
13
+
14
+# Load the BCM V4l2 driver for /dev/video0
15
+os.system('sudo modprobe bcm2835-v4l2')
16
+# Set the framerate ( not sure this does anything! )
17
+os.system('v4l2-ctl -p 4')
18
+
19
+width, height = 320, 240
20
+cap = cv2.VideoCapture(0)
21
+cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH,  width)
22
+cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, height)
23
+
24
+cascPath = '/home/pi/lbpcascade_frontalface.xml'
25
+faceCascade = cv2.CascadeClassifier(cascPath)
26
+
27
+root = tk.Tk()
28
+root.attributes("-fullscreen", True)
29
+root.bind('<Escape>', lambda e: root.quit())
30
+
31
+lmain = tk.Label(root)
32
+lmain.pack()
33
+
34
+last_image_faces = []
35
+users = []
36
+
37
+font = cv2.FONT_HERSHEY_COMPLEX_SMALL
38
+
39
+c = zerorpc.Client()
40
+c.connect("tcp://192.168.1.40:4242")
41
+
42
+def show_frame():
43
+    _, frame = cap.read()
44
+    frame = cv2.flip(frame, 1)
45
+    frame = faceDetect(frame)
46
+    cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)
47
+    img = Image.fromarray(cv2image)
48
+    imgtk = ImageTk.PhotoImage(image=img)
49
+    lmain.imgtk = imgtk
50
+    lmain.configure(image=imgtk)
51
+    lmain.after(1, show_frame)
52
+
53
+def faceDetect(frame):
54
+
55
+    # Do face detection
56
+    #faces = faceCascade.detectMultiScale(frame, 1.1, 3, 0, (10, 10))
57
+
58
+    #Slower method
59
+    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
60
+    gray = cv2.equalizeHist( gray )
61
+    faces = faceCascade.detectMultiScale(
62
+	    gray,
63
+        scaleFactor=1.1,
64
+        minNeighbors=4,
65
+        minSize=(20, 20),
66
+        flags=cv2.cv.CV_HAAR_SCALE_IMAGE | cv2.cv.CV_HAAR_FIND_BIGGEST_OBJECT | cv2.cv.CV_HAAR_DO_ROUGH_SEARCH
67
+    )
68
+
69
+    print "Found {0} faces!".format(len(faces))
70
+    global last_image_faces
71
+    image_faces = []
72
+
73
+    for (x, y, w, h) in faces:
74
+        counter = 1
75
+        center_x = x + (w/2)
76
+        center_y = y + (h/2)
77
+        start_y = center_y - 40
78
+        start_x = center_x - 40
79
+        if len(last_image_faces) > 0:
80
+            pos = last_image_faces[0]
81
+            last_image_faces.remove(pos)
82
+            dist = math.hypot(center_x - pos[0], center_y - pos[1])
83
+            if dist < 30:
84
+
85
+                # Info = [center_x, center_y, time_since_last_check, user, score]
86
+                center = [center_x, center_y, pos[2] + 1]
87
+                print("Tracking face " + str(counter))
88
+                counter = counter + 1
89
+                if center[2] > 6:
90
+                    if start_x > 0 and start_y > 0:
91
+                        face_crop = frame[y:(y+h), x:(x+w)]
92
+                        info = recognizeFace(face_crop)
93
+                        center[2] = 1
94
+                        center.append(info[0])
95
+                        center.append(info[1])
96
+                image_faces.append(center)
97
+                cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 255, 255), 2)
98
+                if len(pos) > 3:
99
+                    center.append(pos[3])
100
+                    center.append(pos[4])
101
+                    if pos[4] < 2000:
102
+                        cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 0, 255), 2)
103
+                        cv2.putText(frame, "%.1f" % (center[4]/1000), ((x + w - 38), (y + 17)), font, 1, (0,0,255), 1, 1)
104
+                    else:
105
+                        cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
106
+                        cv2.putText(frame, users[center[3]], (x, (y + h + 15)), font, 1, (0,255,0), 1, 1)
107
+                        cv2.putText(frame, "%.1f" % (center[4]/1000), ((x + w - 38), (y + 17)), font, 1, (0,255,0), 1, 1)
108
+            else:
109
+                center = [center_x, center_y, 1]
110
+                image_faces.append(center)
111
+                cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 255, 255), 2)
112
+        else:
113
+            center = [center_x, center_y, 1]
114
+            image_faces.append(center)
115
+            cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 255, 255), 2)
116
+    print("Number of faces detected " + str(len(last_image_faces)))
117
+    last_image_faces = image_faces
118
+    return frame
119
+
120
+def recognizeFace(face):
121
+    print("Searching Face database...")
122
+    match_found = False
123
+    face = cv2.resize(face, (120, 120))
124
+    face = cv2.cvtColor(face, cv2.cv.CV_BGR2GRAY)
125
+    face = cv2.equalizeHist( face )
126
+    cv2.imwrite("/home/pi/photos/faces/face-" + str(len(face_db) + 1) + ".jpg", face)
127
+    loadFaceDB()
128
+    predicted_label = predict_image_from_model(model, face)
129
+    print 'Predicted: %(predicted)s  ' %  {"predicted": users[predicted_label[0]]}
130
+    print str(predicted_label[0]) + " - " + str(predicted_label[1])
131
+    print c.face_recognized(users[predicted_label[0]])
132
+    return predicted_label
133
+
134
+def loadFaceDB():
135
+    # Load faces
136
+    face_db_path='/home/pi/photos/faces'
137
+    onlyfiles = [ f for f in listdir(face_db_path) if isfile(join(face_db_path,f)) ]
138
+    global face_db
139
+    face_db = numpy.empty(len(onlyfiles), dtype=object)
140
+    for n in range(0, len(onlyfiles)):
141
+      face_db[n] = cv2.imread( join(face_db_path,onlyfiles[n]) )
142
+
143
+# Face Recognition
144
+
145
+def create_and_train_model_from_dict(label_matrix):
146
+    """ Create eigenface model from dict of labels and images """
147
+    model = cv2.createEigenFaceRecognizer()
148
+    model.train(label_matrix.values(), numpy.array(label_matrix.keys()))
149
+    return model
150
+
151
+def predict_image_from_model(model, image):
152
+    """ Given an eigenface model, predict the label of an image"""
153
+    return model.predict(image)
154
+
155
+def read_csv(filename='/home/pi/faces.csv'):
156
+    """ Read a csv file """
157
+    csv = open(filename, 'r')
158
+    return csv
159
+
160
+def prepare_training_testing_data(file):
161
+    """ prepare testing and training data from file"""
162
+    lines = file.readlines()
163
+    training_data, testing_data = split_test_training_data(lines)
164
+    return training_data
165
+
166
def create_label_matrix_dict(input_file):
    """Build {int_label: image_matrix} from an iterable of 'filename;label' csv lines.

    Note: the original tried to append additional images for an existing
    label via numpy.append, but looked the key up with the *string* label
    (keys are ints) and discarded numpy.append's return value, so only the
    first image per label was ever kept.  That effective behavior is
    preserved here, with the dead code removed.
    """
    label_dict = {}
    for line in input_file:
        print(line)
        # each csv line is "filename;label"
        filename, label = line.strip().split(';')
        key = int(label)
        if key not in label_dict:
            label_dict[key] = read_matrix_from_file(filename)
        # else: keep the first image seen for this label (see docstring)
    return label_dict
184
+
185
def split_test_training_data(data, ratio=0.2):
    """Shuffle *data* in place and return (training, testing) partitions.

    *ratio* is the fraction of items (floored) put in the test partition.
    """
    cut = int(math.floor(ratio * len(data)))
    random.shuffle(data)
    return data[cut:], data[:cut]
190
+
191
+def read_matrix_from_file(filename):
192
+    """ read in grayscale version of image from file """
193
+    return cv2.imread(filename, cv2.CV_LOAD_IMAGE_GRAYSCALE)
194
+
195
+def create_csv():
196
+    BASE_PATH="/home/pi/photos/recognized_faces"
197
+    SEPARATOR=";"
198
+    label = 0
199
+    open("/home/pi/faces.csv", 'w').close()
200
+    with open("/home/pi/faces.csv", "a") as myfile:
201
+        for dirname, dirnames, filenames in os.walk(BASE_PATH):
202
+            for subdirname in dirnames:
203
+                users.append(subdirname)
204
+                subject_path = os.path.join(dirname, subdirname)
205
+                for filename in os.listdir(subject_path):
206
+                    abs_path = "%s/%s" % (subject_path, filename)
207
+                    myfile.write("%s%s%d\n" % (abs_path, SEPARATOR, label))
208
+                label = label + 1
209
+
210
+
211
+# Face Recognition vars
212
+create_csv()
213
+training_data = prepare_training_testing_data(read_csv())
214
+data_dict = create_label_matrix_dict(training_data)
215
+model = create_and_train_model_from_dict(data_dict)
216
+
217
+loadFaceDB()
218
+show_frame()
219
+
220
+root.mainloop()

+ 224 - 0
face_recognizer_v3.py

@@ -0,0 +1,224 @@
1
#FACE RECOGNIZER

#import Tkinter as tk
import cv2, sys, time, os, math
from PIL import Image, ImageTk
import numpy as numpy
import pprint
import random
import math
from os import listdir
from os.path import isfile, join
import zerorpc

# Load the BCM V4l2 driver for /dev/video0
os.system('sudo modprobe bcm2835-v4l2')
# Set the framerate ( not sure this does anything! )
#os.system('v4l2-ctl -p 4')

# Capture at 320x240 — small frames keep LBP detection responsive.
width, height = 320, 240
cap = cv2.VideoCapture()
cap.open(0)
cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH,  width)
cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, height)

# LBP cascade used for face detection (hard-coded dev-machine path).
cascPath = '/Users/james/dev/betabot-python-tests/lbpcascade_frontalface.xml'
faceCascade = cv2.CascadeClassifier(cascPath)

#root = tk.Tk()
#root.attributes("-fullscreen", True)
#root.bind('<Escape>', lambda e: root.quit())

#lmain = tk.Label(root)
#lmain.pack()

# Face tracks carried over from the previous frame; each entry is
# [center_x, center_y, frames_tracked, (user_label, score)...] — see faceDetect.
last_image_faces = []
# Folder names of known people, filled by create_csv(); index == training label.
users = []

font = cv2.FONT_HERSHEY_COMPLEX_SMALL

# RPC client used to notify a remote service whenever a face is recognized.
c = zerorpc.Client()
c.connect("tcp://192.168.1.40:4242")
42
+
43
def show_frame():
    """Grab one camera frame, run face detection on it, and display it."""
    _, raw = cap.read()
    mirrored = cv2.flip(raw, 1)
    annotated = faceDetect(mirrored)
    cv2.imshow("webcam",annotated);
    # Convert BGR -> RGBA and wrap as a PIL image (Tk display path below is
    # disabled in this version, so the PIL image is currently unused).
    cv2image = cv2.cvtColor(annotated, cv2.COLOR_BGR2RGBA)
    img = Image.fromarray(cv2image)
    #imgtk = ImageTk.PhotoImage(image=img)
    #lmain.imgtk = imgtk
    #lmain.configure(image=imgtk)
    #lmain.after(1, show_frame)
54
+
55
def faceDetect(frame):
    """Detect faces in frame, track them frame-to-frame, and annotate frame.

    Detected faces are matched against ``last_image_faces`` (the tracks left
    over from the previous frame); a face whose center stays within 30px of
    a previous track is considered the same face.  After a track survives
    more than 6 consecutive frames, its crop is sent to recognizeFace() and
    the resulting (label, score) is attached to the track.  Rectangles and
    labels are drawn directly onto ``frame``, which is also returned.
    Mutates the global ``last_image_faces``.
    """

    # Do face detection
    #faces = faceCascade.detectMultiScale(frame, 1.1, 3, 0, (10, 10))

    #Slower method
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    gray = cv2.equalizeHist( gray )
    faces = faceCascade.detectMultiScale(
	    gray,
        scaleFactor=1.1,
        minNeighbors=4,
        minSize=(20, 20),
        flags=cv2.cv.CV_HAAR_SCALE_IMAGE | cv2.cv.CV_HAAR_FIND_BIGGEST_OBJECT | cv2.cv.CV_HAAR_DO_ROUGH_SEARCH
    )

    print "Found {0} faces!".format(len(faces))
    global last_image_faces
    image_faces = []

    for (x, y, w, h) in faces:
        # NOTE(review): counter is reset to 1 on every iteration, so the
        # "Tracking face" message below always prints 1.
        counter = 1
        center_x = x + (w/2)
        center_y = y + (h/2)
        start_y = center_y - 40
        start_x = center_x - 40
        if len(last_image_faces) > 0:
            # Greedy match: always compare against the first remaining track.
            pos = last_image_faces[0]
            last_image_faces.remove(pos)
            dist = math.hypot(center_x - pos[0], center_y - pos[1])
            if dist < 30:

                # Info = [center_x, center_y, time_since_last_check, user, score]
                center = [center_x, center_y, pos[2] + 1]
                print("Tracking face " + str(counter))
                counter = counter + 1
                # Tracked for more than 6 frames -> attempt recognition,
                # but only when the face is not clipped at the frame edge.
                if center[2] > 6:
                    if start_x > 0 and start_y > 0:
                        face_crop = frame[y:(y+h), x:(x+w)]
                        info = recognizeFace(face_crop)
                        center[2] = 1
                        center.append(info[0])
                        center.append(info[1])
                image_faces.append(center)
                cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 255, 255), 2)
                # Carry forward a previous recognition result, if any.
                # NOTE(review): if recognizeFace just ran above, these appends
                # extend the track to 7 elements — presumably unintended; verify.
                if len(pos) > 3:
                    center.append(pos[3])
                    center.append(pos[4])
                    # Score threshold 2000 splits "unsure" (red box, score only)
                    # from "recognized" (green box plus user name).
                    if pos[4] < 2000:
                        cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 0, 255), 2)
                        cv2.putText(frame, "%.1f" % (center[4]/1000), ((x + w - 38), (y + 17)), font, 1, (0,0,255), 1, 1)
                    else:
                        cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
                        cv2.putText(frame, users[center[3]], (x, (y + h + 15)), font, 1, (0,255,0), 1, 1)
                        cv2.putText(frame, "%.1f" % (center[4]/1000), ((x + w - 38), (y + 17)), font, 1, (0,255,0), 1, 1)
            else:
                # Too far from the previous track — start a fresh one.
                center = [center_x, center_y, 1]
                image_faces.append(center)
                cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 255, 255), 2)
        else:
            # No previous tracks — start a fresh one.
            center = [center_x, center_y, 1]
            image_faces.append(center)
            cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 255, 255), 2)
    # NOTE(review): at this point last_image_faces holds only the *unmatched*
    # leftover tracks, so this message does not report faces in this frame.
    print("Number of faces detected " + str(len(last_image_faces)))
    last_image_faces = image_faces
    return frame
121
+
122
def recognizeFace(face):
    """Normalize a face crop, archive it, and predict which user it is.

    The crop is resized to 120x120, converted to grayscale and
    histogram-equalized, written into the face snapshot folder, then run
    through the global eigenface ``model``.  The remote service is notified
    via the zerorpc client ``c``.  Returns model.predict's result, used by
    callers as (label_index, score).
    """
    print("Searching Face database...")
    match_found = False  # NOTE(review): never read — dead variable
    face = cv2.resize(face, (120, 120))
    face = cv2.cvtColor(face, cv2.cv.CV_BGR2GRAY)
    face = cv2.equalizeHist( face )
    # Archive every analyzed crop; numbering continues from the current
    # face_db size.
    cv2.imwrite("/Users/james/dev/betabot-python-tests/photos/faces/face-" + str(len(face_db) + 1) + ".jpg", face)
    loadFaceDB()
    predicted_label = predict_image_from_model(model, face)
    print 'Predicted: %(predicted)s  ' %  {"predicted": users[predicted_label[0]]}
    print str(predicted_label[0]) + " - " + str(predicted_label[1])
    print c.face_recognized(users[predicted_label[0]])
    return predicted_label
135
+
136
def loadFaceDB():
    """Reload every face snapshot on disk into the global face_db array."""
    face_db_path='/Users/james/dev/betabot-python-tests/photos/faces'
    # Only plain files count; sub-directories are skipped.
    snapshot_names = [f for f in listdir(face_db_path) if isfile(join(face_db_path, f))]
    global face_db
    face_db = numpy.empty(len(snapshot_names), dtype=object)
    for idx in range(len(snapshot_names)):
        face_db[idx] = cv2.imread( join(face_db_path, snapshot_names[idx]) )
144
+
145
+# Face Recognition
146
+
147
def create_and_train_model_from_dict(label_matrix):
    """ Create eigenface model from dict of labels and images """
    # One image per integer label; keys become the training label array.
    samples = label_matrix.values()
    labels = numpy.array(label_matrix.keys())
    recognizer = cv2.createEigenFaceRecognizer()
    recognizer.train(samples, labels)
    return recognizer
152
+
153
def predict_image_from_model(model, image):
    """ Given an eigenface model, predict the label of an image"""
    prediction = model.predict(image)
    return prediction
156
+
157
def read_csv(filename='/Users/james/dev/betabot-python-tests/faces.csv'):
    """Open the face-index csv for reading and return the file object.

    Note: the caller is responsible for closing the returned handle.
    """
    return open(filename, 'r')
161
+
162
def prepare_training_testing_data(file):
    """ prepare testing and training data from file"""
    # Split all lines into training/testing; only the training share is used.
    all_lines = file.readlines()
    training, _testing = split_test_training_data(all_lines)
    return training
167
+
168
def create_label_matrix_dict(input_file):
    """ Create dict of label -> matricies from file """
    # Map each integer label to the first image matrix seen for it.
    label_dict = {}

    for line in input_file:
        print(line)
        ## split on the ';' in the csv separating filename;label
        filename, label = line.strip().split(';')
        key = int(label)

        # Bug fix: the original tested ``has_key(int(label))`` (Python-2-only)
        # but then looked the value up with the *string* label — always None —
        # and discarded numpy.append's return value, so extra images for an
        # already-seen label were silently dropped anyway.  Downstream,
        # create_and_train_model_from_dict expects exactly one matrix per
        # label, so we keep the first image per label and skip duplicates
        # explicitly (same effective behaviour, no dead code).
        if key not in label_dict:
            label_dict[key] = read_matrix_from_file(filename)

    return label_dict
186
+
187
def split_test_training_data(data, ratio=0.2):
    """Shuffle data in place, then split off the first ratio share as test.

    Returns (training, testing).  The caller's list is shuffled as a side
    effect.
    """
    n_test = int(math.floor(ratio * len(data)))
    random.shuffle(data)
    return data[n_test:], data[:n_test]
192
+
193
def read_matrix_from_file(filename):
    """Load the image at filename as a grayscale matrix (OpenCV 2.x API)."""
    image = cv2.imread(filename, cv2.CV_LOAD_IMAGE_GRAYSCALE)
    return image
196
+
197
def create_csv(base_path="/Users/james/dev/betabot-python-tests/photos/recognized_faces",
               csv_path="/Users/james/dev/betabot-python-tests/faces.csv"):
    """Index every face photo under base_path into a ``filename;label`` csv.

    Each sub-directory encountered while walking base_path is one person:
    its name is appended to the global ``users`` list and every entry that
    os.listdir returns inside it is written with one integer label (the
    label increments per sub-directory, matching ``users`` indices).

    base_path -- root directory of per-person photo folders
                 (default preserves the original hard-coded path).
    csv_path  -- output csv; overwritten on every call.
    """
    SEPARATOR = ";"
    # Robustness: tolerate the module-level ``users`` list not existing yet
    # so the function is usable stand-alone.
    global users
    try:
        users
    except NameError:
        users = []
    label = 0
    # Opening with 'w' truncates the file, replacing the original
    # open(...).close() truncation followed by append mode.
    with open(csv_path, "w") as myfile:
        for dirname, dirnames, filenames in os.walk(base_path):
            for subdirname in dirnames:
                users.append(subdirname)
                subject_path = os.path.join(dirname, subdirname)
                # NOTE(review): os.listdir also returns sub-directories, so a
                # nested folder would be written as if it were an image file —
                # behaviour kept from the original.
                for filename in os.listdir(subject_path):
                    abs_path = "%s/%s" % (subject_path, filename)
                    myfile.write("%s%s%d\n" % (abs_path, SEPARATOR, label))
                label = label + 1
+
212
+
213
# Face Recognition vars
# Startup sequence: rebuild the csv index of known faces, train the
# eigenface model from it, load the raw face snapshots, then poll the
# camera forever (the Tk UI is disabled in this version; frames are
# shown via cv2.imshow inside show_frame).
create_csv()
training_data = prepare_training_testing_data(read_csv())
data_dict = create_label_matrix_dict(training_data)
model = create_and_train_model_from_dict(data_dict)

loadFaceDB()

while True:
    show_frame()

#root.mainloop()

+ 122 - 0
facetracker.py

@@ -0,0 +1,122 @@
1
#!/usr/bin/env python
# Face tracker prototype: detects a face with an LBP cascade and (when the
# commented-out pantilt code is enabled) steers a pan/tilt camera toward it.

import cv2, sys, time, os
#from pantilt import *
import Tkinter as tk
import PIL
from PIL import ImageTk
from PIL import Image

# Load the BCM V4l2 driver for /dev/video0
os.system('sudo modprobe bcm2835-v4l2')
# Set the framerate ( not sure this does anything! )
os.system('v4l2-ctl -p 4')

# Frame Size. Smaller is faster, but less accurate.
# Wide and short is better, since moving your head
# vertically is kinda hard!
FRAME_W = 320
FRAME_H = 240

# Default Pan/Tilt for the camera in degrees.
# Camera range is from 0 to 180
cam_pan = 70
cam_tilt = 70

# Set up the CascadeClassifier for face tracking
#cascPath = 'haarcascade_frontalface_default.xml' # sys.argv[1]
cascPath = '/home/pi/lbpcascade_frontalface.xml'
faceCascade = cv2.CascadeClassifier(cascPath)

# Set up the capture with our frame size
video_capture = cv2.VideoCapture(0)
video_capture.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH,  FRAME_W)
video_capture.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, FRAME_H)
# Give the camera time to warm up before the first read.
time.sleep(2)

window = tk.Tk()  #Makes main window
window.wm_title("Digital Microscope")
window.config(background="#FFFFFF")
#window.attributes("-fullscreen", True)

# Turn the camera to the default position
#pan(cam_pan)
#tilt(cam_tilt)

# NOTE(review): mainloop() blocks here until the Tk window is closed, so the
# capture/tracking loop below never runs while the window is open — this call
# probably belongs after the loop (or the loop should be driven by
# window.after callbacks).
window.mainloop()
47
+
48
# Main capture/tracking loop.
# NOTE(review): only reached after the Tk window above is closed — see the
# mainloop() note in the setup section.
while True:
    # Capture frame-by-frame
    ret, frame = video_capture.read()

    if ret == False:
      print("Error getting image")
      continue

    # Convert to greyscale for detection
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    gray = cv2.equalizeHist( gray )

    # Do face detection
    #faces = faceCascade.detectMultiScale(frame, 1.1, 3, 0, (10, 10))

    # Slower method
    faces = faceCascade.detectMultiScale(
	gray,
        scaleFactor=1.1,
        minNeighbors=4,
        minSize=(20, 20),
        flags=cv2.cv.CV_HAAR_SCALE_IMAGE | cv2.cv.CV_HAAR_FIND_BIGGEST_OBJECT | cv2.cv.CV_HAAR_DO_ROUGH_SEARCH
    )

    print "Found {0} faces!".format(len(faces))

    for (x, y, w, h) in faces:
        # Draw a green rectangle around the face
        cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)

        # Track first face

        # Get the center of the face

        # Correct relative to center of image
        # NOTE(review): uses the box's top-left corner (x, y), not its
        # center — presumably the center was intended; verify.
        turn_x  = float(x - (FRAME_W/2))
        turn_y  = float(y - (FRAME_H/2))

        # Convert to percentage offset
        turn_x  /= float(FRAME_W/2)
        turn_y  /= float(FRAME_H/2)

        # Scale offset to degrees
        turn_x   *= 2.5 # VFOV
        turn_y   *= 2.5 # HFOV
        cam_pan  += -turn_x
        cam_tilt += turn_y

        # Clamp Pan/Tilt to 0 to 180 degrees
        # cam_pan = max(0,min(180,cam_pan))
        # cam_tilt = max(0,min(180,cam_tilt))

        # Update the servos
        # pan(cam_pan)
        # tilt(cam_tilt)

        # Only the first detected face is tracked per frame.
        break

    # Display the image, with rectangle
    # on the Pi desktop
    #cv2.imshow('Video', frame)
    # BGR -> RGB for PIL, then wrap for Tk.
    b,g,r = cv2.split(frame)
    img = cv2.merge((r,g,b))
    im = Image.fromarray(img)
    img = ImageTk.PhotoImage(image=im)
    #img = ImageTk.PhotoImage(frame)
    # NOTE(review): a new Label is created every frame but never packed or
    # placed, and mainloop has already returned — nothing is displayed.
    panel = tk.Label(window, image = img)


    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

# When everything is done, release the capture
video_capture.release()
cv2.destroyAllWindows()

+ 1505 - 0
lbpcascade_frontalface.xml

@@ -0,0 +1,1505 @@
1
+<?xml version="1.0"?>
2
+<!--
3
+number of positive samples 3000
4
+number of negative samples 1500
5
+-->
6
+<opencv_storage>
7
+<cascade type_id="opencv-cascade-classifier">
8
+  <stageType>BOOST</stageType>
9
+  <featureType>LBP</featureType>
10
+  <height>24</height>
11
+  <width>24</width>
12
+  <stageParams>
13
+    <boostType>GAB</boostType>
14
+    <minHitRate>0.9950000047683716</minHitRate>
15
+    <maxFalseAlarm>0.5000000000000000</maxFalseAlarm>
16
+    <weightTrimRate>0.9500000000000000</weightTrimRate>
17
+    <maxDepth>1</maxDepth>
18
+    <maxWeakCount>100</maxWeakCount></stageParams>
19
+  <featureParams>
20
+    <maxCatCount>256</maxCatCount></featureParams>
21
+  <stageNum>20</stageNum>
22
+  <stages>
23
+    <!-- stage 0 -->
24
+    <_>
25
+      <maxWeakCount>3</maxWeakCount>
26
+      <stageThreshold>-0.7520892024040222</stageThreshold>
27
+      <weakClassifiers>
28
+        <!-- tree 0 -->
29
+        <_>
30
+          <internalNodes>
31
+            0 -1 46 -67130709 -21569 -1426120013 -1275125205 -21585
32
+            -16385 587145899 -24005</internalNodes>
33
+          <leafValues>
34
+            -0.6543210148811340 0.8888888955116272</leafValues></_>
35
+        <!-- tree 1 -->
36
+        <_>
37
+          <internalNodes>
38
+            0 -1 13 -163512766 -769593758 -10027009 -262145 -514457854
39
+            -193593353 -524289 -1</internalNodes>
40
+          <leafValues>
41
+            -0.7739216089248657 0.7278633713722229</leafValues></_>
42
+        <!-- tree 2 -->
43
+        <_>
44
+          <internalNodes>
45
+            0 -1 2 -363936790 -893203669 -1337948010 -136907894
46
+            1088782736 -134217726 -741544961 -1590337</internalNodes>
47
+          <leafValues>
48
+            -0.7068563103675842 0.6761534214019775</leafValues></_></weakClassifiers></_>
49
+    <!-- stage 1 -->
50
+    <_>
51
+      <maxWeakCount>4</maxWeakCount>
52
+      <stageThreshold>-0.4872078299522400</stageThreshold>
53
+      <weakClassifiers>
54
+        <!-- tree 0 -->
55
+        <_>
56
+          <internalNodes>
57
+            0 -1 84 2147483647 1946124287 -536870913 2147450879
58
+            738132490 1061101567 243204619 2147446655</internalNodes>
59
+          <leafValues>
60
+            -0.8083735704421997 0.7685696482658386</leafValues></_>
61
+        <!-- tree 1 -->
62
+        <_>
63
+          <internalNodes>
64
+            0 -1 21 2147483647 263176079 1879048191 254749487 1879048191
65
+            -134252545 -268435457 801111999</internalNodes>
66
+          <leafValues>
67
+            -0.7698410153388977 0.6592915654182434</leafValues></_>
68
+        <!-- tree 2 -->
69
+        <_>
70
+          <internalNodes>
71
+            0 -1 106 -98110272 1610939566 -285484400 -850010381
72
+            -189334372 -1671954433 -571026695 -262145</internalNodes>
73
+          <leafValues>
74
+            -0.7506558895111084 0.5444605946540833</leafValues></_>
75
+        <!-- tree 3 -->
76
+        <_>
77
+          <internalNodes>
78
+            0 -1 48 -798690576 -131075 1095771153 -237144073 -65569 -1
79
+            -216727745 -69206049</internalNodes>
80
+          <leafValues>
81
+            -0.7775990366935730 0.5465461611747742</leafValues></_></weakClassifiers></_>
82
+    <!-- stage 2 -->
83
+    <_>
84
+      <maxWeakCount>4</maxWeakCount>
85
+      <stageThreshold>-1.1592328548431396</stageThreshold>
86
+      <weakClassifiers>
87
+        <!-- tree 0 -->
88
+        <_>
89
+          <internalNodes>
90
+            0 -1 47 -21585 -20549 -100818262 -738254174 -20561 -36865
91
+            -151016790 -134238549</internalNodes>
92
+          <leafValues>
93
+            -0.5601882934570313 0.7743113040924072</leafValues></_>
94
+        <!-- tree 1 -->
95
+        <_>
96
+          <internalNodes>
97
+            0 -1 12 -286003217 183435247 -268994614 -421330945
98
+            -402686081 1090387966 -286785545 -402653185</internalNodes>
99
+          <leafValues>
100
+            -0.6124526262283325 0.6978127956390381</leafValues></_>
101
+        <!-- tree 2 -->
102
+        <_>
103
+          <internalNodes>
104
+            0 -1 26 -50347012 970882927 -50463492 -1253377 -134218251
105
+            -50364513 -33619992 -172490753</internalNodes>
106
+          <leafValues>
107
+            -0.6114496588706970 0.6537628173828125</leafValues></_>
108
+        <!-- tree 3 -->
109
+        <_>
110
+          <internalNodes>
111
+            0 -1 8 -273 -135266321 1877977738 -2088243418 -134217987
112
+            2146926575 -18910642 1095231247</internalNodes>
113
+          <leafValues>
114
+            -0.6854077577590942 0.5403239130973816</leafValues></_></weakClassifiers></_>
115
+    <!-- stage 3 -->
116
+    <_>
117
+      <maxWeakCount>5</maxWeakCount>
118
+      <stageThreshold>-0.7562355995178223</stageThreshold>
119
+      <weakClassifiers>
120
+        <!-- tree 0 -->
121
+        <_>
122
+          <internalNodes>
123
+            0 -1 96 -1273 1870659519 -20971602 -67633153 -134250731
124
+            2004875127 -250 -150995969</internalNodes>
125
+          <leafValues>
126
+            -0.4051094949245453 0.7584033608436585</leafValues></_>
127
+        <!-- tree 1 -->
128
+        <_>
129
+          <internalNodes>
130
+            0 -1 33 -868162224 -76810262 -4262145 -257 1465211989
131
+            -268959873 -2656269 -524289</internalNodes>
132
+          <leafValues>
133
+            -0.7388162612915039 0.5340843200683594</leafValues></_>
134
+        <!-- tree 2 -->
135
+        <_>
136
+          <internalNodes>
137
+            0 -1 57 -12817 -49 -541103378 -152950 -38993 -20481 -1153876
138
+            -72478976</internalNodes>
139
+          <leafValues>
140
+            -0.6582943797111511 0.5339496731758118</leafValues></_>
141
+        <!-- tree 3 -->
142
+        <_>
143
+          <internalNodes>
144
+            0 -1 125 -269484161 -452984961 -319816180 -1594032130 -2111
145
+            -990117891 -488975296 -520947741</internalNodes>
146
+          <leafValues>
147
+            -0.5981323719024658 0.5323504805564880</leafValues></_>
148
+        <!-- tree 4 -->
149
+        <_>
150
+          <internalNodes>
151
+            0 -1 53 557787431 670265215 -1342193665 -1075892225
152
+            1998528318 1056964607 -33570977 -1</internalNodes>
153
+          <leafValues>
154
+            -0.6498787999153137 0.4913350641727448</leafValues></_></weakClassifiers></_>
155
+    <!-- stage 4 -->
156
+    <_>
157
+      <maxWeakCount>5</maxWeakCount>
158
+      <stageThreshold>-0.8085358142852783</stageThreshold>
159
+      <weakClassifiers>
160
+        <!-- tree 0 -->
161
+        <_>
162
+          <internalNodes>
163
+            0 -1 60 -536873708 880195381 -16842788 -20971521 -176687276
164
+            -168427659 -16777260 -33554626</internalNodes>
165
+          <leafValues>
166
+            -0.5278195738792419 0.6946372389793396</leafValues></_>
167
+        <!-- tree 1 -->
168
+        <_>
169
+          <internalNodes>
170
+            0 -1 7 -1 -62981529 -1090591130 805330978 -8388827 -41945787
171
+            -39577 -531118985</internalNodes>
172
+          <leafValues>
173
+            -0.5206505060195923 0.6329920291900635</leafValues></_>
174
+        <!-- tree 2 -->
175
+        <_>
176
+          <internalNodes>
177
+            0 -1 98 -725287348 1347747543 -852489 -16809993 1489881036
178
+            -167903241 -1 -1</internalNodes>
179
+          <leafValues>
180
+            -0.7516061067581177 0.4232024252414703</leafValues></_>
181
+        <!-- tree 3 -->
182
+        <_>
183
+          <internalNodes>
184
+            0 -1 44 -32777 1006582562 -65 935312171 -8388609 -1078198273
185
+            -1 733886267</internalNodes>
186
+          <leafValues>
187
+            -0.7639313936233521 0.4123568832874298</leafValues></_>
188
+        <!-- tree 4 -->
189
+        <_>
190
+          <internalNodes>
191
+            0 -1 24 -85474705 2138828511 -1036436754 817625855
192
+            1123369029 -58796809 -1013468481 -194513409</internalNodes>
193
+          <leafValues>
194
+            -0.5123769044876099 0.5791834592819214</leafValues></_></weakClassifiers></_>
195
+    <!-- stage 5 -->
196
+    <_>
197
+      <maxWeakCount>5</maxWeakCount>
198
+      <stageThreshold>-0.5549971461296082</stageThreshold>
199
+      <weakClassifiers>
200
+        <!-- tree 0 -->
201
+        <_>
202
+          <internalNodes>
203
+            0 -1 42 -17409 -20481 -268457797 -134239493 -17473 -1 -21829
204
+            -21846</internalNodes>
205
+          <leafValues>
206
+            -0.3763174116611481 0.7298233509063721</leafValues></_>
207
+        <!-- tree 1 -->
208
+        <_>
209
+          <internalNodes>
210
+            0 -1 6 -805310737 -2098262358 -269504725 682502698
211
+            2147483519 1740574719 -1090519233 -268472385</internalNodes>
212
+          <leafValues>
213
+            -0.5352765917778015 0.5659480094909668</leafValues></_>
214
+        <!-- tree 2 -->
215
+        <_>
216
+          <internalNodes>
217
+            0 -1 61 -67109678 -6145 -8 -87884584 -20481 -1073762305
218
+            -50856216 -16849696</internalNodes>
219
+          <leafValues>
220
+            -0.5678374171257019 0.4961479902267456</leafValues></_>
221
+        <!-- tree 3 -->
222
+        <_>
223
+          <internalNodes>
224
+            0 -1 123 -138428633 1002418167 -1359008245 -1908670465
225
+            -1346685918 910098423 -1359010520 -1346371657</internalNodes>
226
+          <leafValues>
227
+            -0.5706262588500977 0.4572288393974304</leafValues></_>
228
+        <!-- tree 4 -->
229
+        <_>
230
+          <internalNodes>
231
+            0 -1 9 -89138513 -4196353 1256531674 -1330665426 1216308261
232
+            -36190633 33498198 -151796633</internalNodes>
233
+          <leafValues>
234
+            -0.5344601869583130 0.4672054052352905</leafValues></_></weakClassifiers></_>
235
+    <!-- stage 6 -->
236
+    <_>
237
+      <maxWeakCount>5</maxWeakCount>
238
+      <stageThreshold>-0.8776460289955139</stageThreshold>
239
+      <weakClassifiers>
240
+        <!-- tree 0 -->
241
+        <_>
242
+          <internalNodes>
243
+            0 -1 105 1073769576 206601725 -34013449 -33554433 -789514004
244
+            -101384321 -690225153 -264193</internalNodes>
245
+          <leafValues>
246
+            -0.7700348496437073 0.5943940877914429</leafValues></_>
247
+        <!-- tree 1 -->
248
+        <_>
249
+          <internalNodes>
250
+            0 -1 30 -1432340997 -823623681 -49153 -34291724 -269484035
251
+            -1342767105 -1078198273 -1277955</internalNodes>
252
+          <leafValues>
253
+            -0.5043668746948242 0.6151274442672730</leafValues></_>
254
+        <!-- tree 2 -->
255
+        <_>
256
+          <internalNodes>
257
+            0 -1 35 -1067385040 -195758209 -436748425 -134217731
258
+            -50855988 -129 -1 -1</internalNodes>
259
+          <leafValues>
260
+            -0.6808040738105774 0.4667325913906097</leafValues></_>
261
+        <!-- tree 3 -->
262
+        <_>
263
+          <internalNodes>
264
+            0 -1 119 832534325 -34111555 -26050561 -423659521 -268468364
265
+            2105014143 -2114244 -17367185</internalNodes>
266
+          <leafValues>
267
+            -0.4927591383457184 0.5401885509490967</leafValues></_>
268
+        <!-- tree 4 -->
269
+        <_>
270
+          <internalNodes>
271
+            0 -1 82 -1089439888 -1080524865 2143059967 -1114121
272
+            -1140949004 -3 -2361356 -739516</internalNodes>
273
+          <leafValues>
274
+            -0.6445107460021973 0.4227822124958038</leafValues></_></weakClassifiers></_>
275
+    <!-- stage 7 -->
276
+    <_>
277
+      <maxWeakCount>6</maxWeakCount>
278
+      <stageThreshold>-1.1139287948608398</stageThreshold>
279
+      <weakClassifiers>
280
+        <!-- tree 0 -->
281
+        <_>
282
+          <internalNodes>
283
+            0 -1 52 -1074071553 -1074003969 -1 -1280135430 -5324817 -1
284
+            -335548482 582134442</internalNodes>
285
+          <leafValues>
286
+            -0.5307556986808777 0.6258179545402527</leafValues></_>
287
+        <!-- tree 1 -->
288
+        <_>
289
+          <internalNodes>
290
+            0 -1 99 -706937396 -705364068 -540016724 -570495027
291
+            -570630659 -587857963 -33628164 -35848193</internalNodes>
292
+          <leafValues>
293
+            -0.5227634310722351 0.5049746036529541</leafValues></_>
294
+        <!-- tree 2 -->
295
+        <_>
296
+          <internalNodes>
297
+            0 -1 18 -2035630093 42119158 -268503053 -1671444 261017599
298
+            1325432815 1954394111 -805306449</internalNodes>
299
+          <leafValues>
300
+            -0.4983572661876679 0.5106441378593445</leafValues></_>
301
+        <!-- tree 3 -->
302
+        <_>
303
+          <internalNodes>
304
+            0 -1 111 -282529488 -1558073088 1426018736 -170526448
305
+            -546832487 -5113037 -34243375 -570427929</internalNodes>
306
+          <leafValues>
307
+            -0.4990860521793366 0.5060507059097290</leafValues></_>
308
+        <!-- tree 4 -->
309
+        <_>
310
+          <internalNodes>
311
+            0 -1 92 1016332500 -606301707 915094269 -1080086049
312
+            -1837027144 -1361600280 2147318747 1067975613</internalNodes>
313
+          <leafValues>
314
+            -0.5695009231567383 0.4460467398166657</leafValues></_>
315
+        <!-- tree 5 -->
316
+        <_>
317
+          <internalNodes>
318
+            0 -1 51 -656420166 -15413034 -141599534 -603435836
319
+            1505950458 -787556946 -79823438 -1326199134</internalNodes>
320
+          <leafValues>
321
+            -0.6590405106544495 0.3616424500942230</leafValues></_></weakClassifiers></_>
322
+    <!-- stage 8 -->
323
+    <_>
324
+      <maxWeakCount>7</maxWeakCount>
325
+      <stageThreshold>-0.8243625760078430</stageThreshold>
326
+      <weakClassifiers>
327
+        <!-- tree 0 -->
328
+        <_>
329
+          <internalNodes>
330
+            0 -1 28 -901591776 -201916417 -262 -67371009 -143312112
331
+            -524289 -41943178 -1</internalNodes>
332
+          <leafValues>
333
+            -0.4972776770591736 0.6027074456214905</leafValues></_>
334
+        <!-- tree 1 -->
335
+        <_>
336
+          <internalNodes>
337
+            0 -1 112 -4507851 -411340929 -268437513 -67502145 -17350859
338
+            -32901 -71344315 -29377</internalNodes>
339
+          <leafValues>
340
+            -0.4383158981800079 0.5966237187385559</leafValues></_>
341
+        <!-- tree 2 -->
342
+        <_>
343
+          <internalNodes>
344
+            0 -1 69 -75894785 -117379438 -239063587 -12538500 1485072126
345
+            2076233213 2123118847 801906927</internalNodes>
346
+          <leafValues>
347
+            -0.6386105418205261 0.3977999985218048</leafValues></_>
348
+        <!-- tree 3 -->
349
+        <_>
350
+          <internalNodes>
351
+            0 -1 19 -823480413 786628589 -16876049 -1364262914 242165211
352
+            1315930109 -696268833 -455082829</internalNodes>
353
+          <leafValues>
354
+            -0.5512794256210327 0.4282079637050629</leafValues></_>
355
+        <!-- tree 4 -->
356
+        <_>
357
+          <internalNodes>
358
+            0 -1 73 -521411968 6746762 -1396236286 -2038436114
359
+            -185612509 57669627 -143132877 -1041235973</internalNodes>
360
+          <leafValues>
361
+            -0.6418755054473877 0.3549866080284119</leafValues></_>
362
+        <!-- tree 5 -->
363
+        <_>
364
+          <internalNodes>
365
+            0 -1 126 -478153869 1076028979 -1645895615 1365298272
366
+            -557859073 -339771473 1442574528 -1058802061</internalNodes>
367
+          <leafValues>
368
+            -0.4841901361942291 0.4668019413948059</leafValues></_>
369
+        <!-- tree 6 -->
370
+        <_>
371
+          <internalNodes>
372
+            0 -1 45 -246350404 -1650402048 -1610612745 -788400696
373
+            1467604861 -2787397 1476263935 -4481349</internalNodes>
374
+          <leafValues>
375
+            -0.5855734348297119 0.3879135847091675</leafValues></_></weakClassifiers></_>
376
+    <!-- stage 9 -->
377
+    <_>
378
+      <maxWeakCount>7</maxWeakCount>
379
+      <stageThreshold>-1.2237116098403931</stageThreshold>
380
+      <weakClassifiers>
381
+        <!-- tree 0 -->
382
+        <_>
383
+          <internalNodes>
384
+            0 -1 114 -24819 1572863935 -16809993 -67108865 2146778388
385
+            1433927541 -268608444 -34865205</internalNodes>
386
+          <leafValues>
387
+            -0.2518476545810700 0.7088654041290283</leafValues></_>
388
+        <!-- tree 1 -->
389
+        <_>
390
+          <internalNodes>
391
+            0 -1 97 -1841359 -134271049 -32769 -5767369 -1116675 -2185
392
+            -8231 -33603327</internalNodes>
393
+          <leafValues>
394
+            -0.4303432404994965 0.5283288359642029</leafValues></_>
395
+        <!-- tree 2 -->
396
+        <_>
397
+          <internalNodes>
398
+            0 -1 25 -1359507589 -1360593090 -1073778729 -269553812
399
+            -809512977 1744707583 -41959433 -134758978</internalNodes>
400
+          <leafValues>
401
+            -0.4259553551673889 0.5440809130668640</leafValues></_>
402
+        <!-- tree 3 -->
403
+        <_>
404
+          <internalNodes>
405
+            0 -1 34 729753407 -134270989 -1140907329 -235200777
406
+            658456383 2147467263 -1140900929 -16385</internalNodes>
407
+          <leafValues>
408
+            -0.5605589151382446 0.4220733344554901</leafValues></_>
409
+        <!-- tree 4 -->
410
+        <_>
411
+          <internalNodes>
412
+            0 -1 134 -310380553 -420675595 -193005472 -353568129
413
+            1205338070 -990380036 887604324 -420544526</internalNodes>
414
+          <leafValues>
415
+            -0.5192656517028809 0.4399855434894562</leafValues></_>
416
+        <!-- tree 5 -->
417
+        <_>
418
+          <internalNodes>
419
+            0 -1 16 -1427119361 1978920959 -287119734 -487068946
420
+            114759245 -540578051 -707510259 -671660453</internalNodes>
421
+          <leafValues>
422
+            -0.5013077259063721 0.4570254683494568</leafValues></_>
423
+        <!-- tree 6 -->
424
+        <_>
425
+          <internalNodes>
426
+            0 -1 74 -738463762 -889949281 -328301948 -121832450
427
+            -1142658284 -1863576559 2146417353 -263185</internalNodes>
428
+          <leafValues>
429
+            -0.4631414115428925 0.4790246188640595</leafValues></_></weakClassifiers></_>
430
+    <!-- stage 10 -->
431
+    <_>
432
+      <maxWeakCount>7</maxWeakCount>
433
+      <stageThreshold>-0.5544230937957764</stageThreshold>
434
+      <weakClassifiers>
435
+        <!-- tree 0 -->
436
+        <_>
437
+          <internalNodes>
438
+            0 -1 113 -76228780 -65538 -1 -67174401 -148007 -33 -221796
439
+            -272842924</internalNodes>
440
+          <leafValues>
441
+            -0.3949716091156006 0.6082032322883606</leafValues></_>
442
+        <!-- tree 1 -->
443
+        <_>
444
+          <internalNodes>
445
+            0 -1 110 369147696 -1625232112 2138570036 -1189900 790708019
446
+            -1212613127 799948719 -4456483</internalNodes>
447
+          <leafValues>
448
+            -0.4855885505676270 0.4785369932651520</leafValues></_>
449
+        <!-- tree 2 -->
450
+        <_>
451
+          <internalNodes>
452
+            0 -1 37 784215839 -290015241 536832799 -402984963
453
+            -1342414991 -838864897 -176769 -268456129</internalNodes>
454
+          <leafValues>
455
+            -0.4620285332202911 0.4989669024944305</leafValues></_>
456
+        <!-- tree 3 -->
457
+        <_>
458
+          <internalNodes>
459
+            0 -1 41 -486418688 -171915327 -340294900 -21938 -519766032
460
+            -772751172 -73096060 -585322623</internalNodes>
461
+          <leafValues>
462
+            -0.6420643329620361 0.3624351918697357</leafValues></_>
463
+        <!-- tree 4 -->
464
+        <_>
465
+          <internalNodes>
466
+            0 -1 117 -33554953 -475332625 -1423463824 -2077230421
467
+            -4849669 -2080505925 -219032928 -1071915349</internalNodes>
468
+          <leafValues>
469
+            -0.4820112884044647 0.4632140696048737</leafValues></_>
470
+        <!-- tree 5 -->
471
+        <_>
472
+          <internalNodes>
473
+            0 -1 65 -834130468 -134217476 -1349314083 -1073803559
474
+            -619913764 -1449131844 -1386890321 -1979118423</internalNodes>
475
+          <leafValues>
476
+            -0.4465552568435669 0.5061788558959961</leafValues></_>
477
+        <!-- tree 6 -->
478
+        <_>
479
+          <internalNodes>
480
+            0 -1 56 -285249779 1912569855 -16530 -1731022870 -1161904146
481
+            -1342177297 -268439634 -1464078708</internalNodes>
482
+          <leafValues>
483
+            -0.5190586447715759 0.4441480338573456</leafValues></_></weakClassifiers></_>
484
+    <!-- stage 11 -->
485
+    <_>
486
+      <maxWeakCount>7</maxWeakCount>
487
+      <stageThreshold>-0.7161560654640198</stageThreshold>
488
+      <weakClassifiers>
489
+        <!-- tree 0 -->
490
+        <_>
491
+          <internalNodes>
492
+            0 -1 20 1246232575 1078001186 -10027057 60102 -277348353
493
+            -43646987 -1210581153 1195769615</internalNodes>
494
+          <leafValues>
495
+            -0.4323809444904327 0.5663768053054810</leafValues></_>
496
+        <!-- tree 1 -->
497
+        <_>
498
+          <internalNodes>
499
+            0 -1 15 -778583572 -612921106 -578775890 -4036478
500
+            -1946580497 -1164766570 -1986687009 -12103599</internalNodes>
501
+          <leafValues>
502
+            -0.4588732719421387 0.4547033011913300</leafValues></_>
503
+        <!-- tree 2 -->
504
+        <_>
505
+          <internalNodes>
506
+            0 -1 129 -1073759445 2013231743 -1363169553 -1082459201
507
+            -1414286549 868185983 -1356133589 -1077936257</internalNodes>
508
+          <leafValues>
509
+            -0.5218553543090820 0.4111092388629913</leafValues></_>
510
+        <!-- tree 3 -->
511
+        <_>
512
+          <internalNodes>
513
+            0 -1 102 -84148365 -2093417722 -1204850272 564290299
514
+            -67121221 -1342177350 -1309195902 -776734797</internalNodes>
515
+          <leafValues>
516
+            -0.4920000731945038 0.4326725304126740</leafValues></_>
517
+        <!-- tree 4 -->
518
+        <_>
519
+          <internalNodes>
520
+            0 -1 88 -25694458 67104495 -290216278 -168563037 2083877442
521
+            1702788383 -144191964 -234882162</internalNodes>
522
+          <leafValues>
523
+            -0.4494568109512329 0.4448510706424713</leafValues></_>
524
+        <!-- tree 5 -->
525
+        <_>
526
+          <internalNodes>
527
+            0 -1 59 -857980836 904682741 -1612267521 232279415
528
+            1550862252 -574825221 -357380888 -4579409</internalNodes>
529
+          <leafValues>
530
+            -0.5180826783180237 0.3888972699642181</leafValues></_>
531
+        <!-- tree 6 -->
532
+        <_>
533
+          <internalNodes>
534
+            0 -1 27 -98549440 -137838400 494928389 -246013630 939541351
535
+            -1196072350 -620603549 2137216273</internalNodes>
536
+          <leafValues>
537
+            -0.6081240773200989 0.3333222270011902</leafValues></_></weakClassifiers></_>
538
+    <!-- stage 12 -->
539
+    <_>
540
+      <maxWeakCount>8</maxWeakCount>
541
+      <stageThreshold>-0.6743940711021423</stageThreshold>
542
+      <weakClassifiers>
543
+        <!-- tree 0 -->
544
+        <_>
545
+          <internalNodes>
546
+            0 -1 29 -150995201 2071191945 -1302151626 536934335
547
+            -1059008937 914128709 1147328110 -268369925</internalNodes>
548
+          <leafValues>
549
+            -0.1790193915367127 0.6605972051620483</leafValues></_>
550
+        <!-- tree 1 -->
551
+        <_>
552
+          <internalNodes>
553
+            0 -1 128 -134509479 1610575703 -1342177289 1861484541
554
+            -1107833788 1577058173 -333558568 -136319041</internalNodes>
555
+          <leafValues>
556
+            -0.3681024610996246 0.5139749646186829</leafValues></_>
557
+        <!-- tree 2 -->
558
+        <_>
559
+          <internalNodes>
560
+            0 -1 70 -1 1060154476 -1090984524 -630918524 -539492875
561
+            779616255 -839568424 -321</internalNodes>
562
+          <leafValues>
563
+            -0.3217232525348663 0.6171553134918213</leafValues></_>
564
+        <!-- tree 3 -->
565
+        <_>
566
+          <internalNodes>
567
+            0 -1 4 -269562385 -285029906 -791084350 -17923776 235286671
568
+            1275504943 1344390399 -966276889</internalNodes>
569
+          <leafValues>
570
+            -0.4373284578323364 0.4358185231685638</leafValues></_>
571
+        <!-- tree 4 -->
572
+        <_>
573
+          <internalNodes>
574
+            0 -1 76 17825984 -747628419 595427229 1474759671 575672208
575
+            -1684005538 872217086 -1155858277</internalNodes>
576
+          <leafValues>
577
+            -0.4404836893081665 0.4601220190525055</leafValues></_>
578
+        <!-- tree 5 -->
579
+        <_>
580
+          <internalNodes>
581
+            0 -1 124 -336593039 1873735591 -822231622 -355795238
582
+            -470820869 -1997537409 -1057132384 -1015285005</internalNodes>
583
+          <leafValues>
584
+            -0.4294152259826660 0.4452161788940430</leafValues></_>
585
+        <!-- tree 6 -->
586
+        <_>
587
+          <internalNodes>
588
+            0 -1 54 -834212130 -593694721 -322142257 -364892500
589
+            -951029539 -302125121 -1615106053 -79249765</internalNodes>
590
+          <leafValues>
591
+            -0.3973052501678467 0.4854526817798615</leafValues></_>
592
+        <!-- tree 7 -->
593
+        <_>
594
+          <internalNodes>
595
+            0 -1 95 1342144479 2147431935 -33554561 -47873 -855685912 -1
596
+            1988052447 536827383</internalNodes>
597
+          <leafValues>
598
+            -0.7054683566093445 0.2697997391223908</leafValues></_></weakClassifiers></_>
599
+    <!-- stage 13 -->
600
+    <_>
601
+      <maxWeakCount>9</maxWeakCount>
602
+      <stageThreshold>-1.2042298316955566</stageThreshold>
603
+      <weakClassifiers>
604
+        <!-- tree 0 -->
605
+        <_>
606
+          <internalNodes>
607
+            0 -1 39 1431368960 -183437936 -537002499 -137497097
608
+            1560590321 -84611081 -2097193 -513</internalNodes>
609
+          <leafValues>
610
+            -0.5905947685241699 0.5101932883262634</leafValues></_>
611
+        <!-- tree 1 -->
612
+        <_>
613
+          <internalNodes>
614
+            0 -1 120 -1645259691 2105491231 2130706431 1458995007
615
+            -8567536 -42483883 -33780003 -21004417</internalNodes>
616
+          <leafValues>
617
+            -0.4449204802513123 0.4490709304809570</leafValues></_>
618
+        <!-- tree 2 -->
619
+        <_>
620
+          <internalNodes>
621
+            0 -1 89 -612381022 -505806938 -362027516 -452985106
622
+            275854917 1920431639 -12600561 -134221825</internalNodes>
623
+          <leafValues>
624
+            -0.4693818688392639 0.4061094820499420</leafValues></_>
625
+        <!-- tree 3 -->
626
+        <_>
627
+          <internalNodes>
628
+            0 -1 14 -805573153 -161 -554172679 -530519488 -16779441
629
+            2000682871 -33604275 -150997129</internalNodes>
630
+          <leafValues>
631
+            -0.3600351214408875 0.5056326985359192</leafValues></_>
632
+        <!-- tree 4 -->
633
+        <_>
634
+          <internalNodes>
635
+            0 -1 67 6192 435166195 1467449341 2046691505 -1608493775
636
+            -4755729 -1083162625 -71365637</internalNodes>
637
+          <leafValues>
638
+            -0.4459891915321350 0.4132415652275085</leafValues></_>
639
+        <!-- tree 5 -->
640
+        <_>
641
+          <internalNodes>
642
+            0 -1 86 -41689215 -3281034 1853357967 -420712635 -415924289
643
+            -270209208 -1088293113 -825311232</internalNodes>
644
+          <leafValues>
645
+            -0.4466069042682648 0.4135067760944367</leafValues></_>
646
+        <!-- tree 6 -->
647
+        <_>
648
+          <internalNodes>
649
+            0 -1 80 -117391116 -42203396 2080374461 -188709 -542008165
650
+            -356831940 -1091125345 -1073796897</internalNodes>
651
+          <leafValues>
652
+            -0.3394956290721893 0.5658645033836365</leafValues></_>
653
+        <!-- tree 7 -->
654
+        <_>
655
+          <internalNodes>
656
+            0 -1 75 -276830049 1378714472 -1342181951 757272098
657
+            1073740607 -282199241 -415761549 170896931</internalNodes>
658
+          <leafValues>
659
+            -0.5346512198448181 0.3584479391574860</leafValues></_>
660
+        <!-- tree 8 -->
661
+        <_>
662
+          <internalNodes>
663
+            0 -1 55 -796075825 -123166849 2113667055 -217530421
664
+            -1107432194 -16385 -806359809 -391188771</internalNodes>
665
+          <leafValues>
666
+            -0.4379335641860962 0.4123645126819611</leafValues></_></weakClassifiers></_>
667
+    <!-- stage 14 -->
668
+    <_>
669
+      <maxWeakCount>10</maxWeakCount>
670
+      <stageThreshold>-0.8402050137519836</stageThreshold>
671
+      <weakClassifiers>
672
+        <!-- tree 0 -->
673
+        <_>
674
+          <internalNodes>
675
+            0 -1 71 -890246622 15525883 -487690486 47116238 -1212319899
676
+            -1291847681 -68159890 -469829921</internalNodes>
677
+          <leafValues>
678
+            -0.2670986354351044 0.6014143228530884</leafValues></_>
679
+        <!-- tree 1 -->
680
+        <_>
681
+          <internalNodes>
682
+            0 -1 31 -1361180685 -1898008841 -1090588811 -285410071
683
+            -1074016265 -840443905 2147221487 -262145</internalNodes>
684
+          <leafValues>
685
+            -0.4149844348430634 0.4670888185501099</leafValues></_>
686
+        <!-- tree 2 -->
687
+        <_>
688
+          <internalNodes>
689
+            0 -1 40 1426190596 1899364271 2142731795 -142607505
690
+            -508232452 -21563393 -41960001 -65</internalNodes>
691
+          <leafValues>
692
+            -0.4985891580581665 0.3719584941864014</leafValues></_>
693
+        <!-- tree 3 -->
694
+        <_>
695
+          <internalNodes>
696
+            0 -1 109 -201337965 10543906 -236498096 -746195597
697
+            1974565825 -15204415 921907633 -190058309</internalNodes>
698
+          <leafValues>
699
+            -0.4568729996681213 0.3965812027454376</leafValues></_>
700
+        <!-- tree 4 -->
701
+        <_>
702
+          <internalNodes>
703
+            0 -1 130 -595026732 -656401928 -268649235 -571490699
704
+            -440600392 -133131 -358810952 -2004088646</internalNodes>
705
+          <leafValues>
706
+            -0.4770836830139160 0.3862601518630981</leafValues></_>
707
+        <!-- tree 5 -->
708
+        <_>
709
+          <internalNodes>
710
+            0 -1 66 941674740 -1107882114 1332789109 -67691015
711
+            -1360463693 -1556612430 -609108546 733546933</internalNodes>
712
+          <leafValues>
713
+            -0.4877715110778809 0.3778986334800720</leafValues></_>
714
+        <!-- tree 6 -->
715
+        <_>
716
+          <internalNodes>
717
+            0 -1 49 -17114945 -240061474 1552871558 -82775604 -932393844
718
+            -1308544889 -532635478 -99042357</internalNodes>
719
+          <leafValues>
720
+            -0.3721654713153839 0.4994400143623352</leafValues></_>
721
+        <!-- tree 7 -->
722
+        <_>
723
+          <internalNodes>
724
+            0 -1 133 -655906006 1405502603 -939205164 1884929228
725
+            -498859222 559417357 -1928559445 -286264385</internalNodes>
726
+          <leafValues>
727
+            -0.3934195041656494 0.4769641458988190</leafValues></_>
728
+        <!-- tree 8 -->
729
+        <_>
730
+          <internalNodes>
731
+            0 -1 0 -335837777 1860677295 -90 -1946186226 931096183
732
+            251612987 2013265917 -671232197</internalNodes>
733
+          <leafValues>
734
+            -0.4323300719261169 0.4342164099216461</leafValues></_>
735
+        <!-- tree 9 -->
736
+        <_>
737
+          <internalNodes>
738
+            0 -1 103 37769424 -137772680 374692301 2002666345 -536176194
739
+            -1644484728 807009019 1069089930</internalNodes>
740
+          <leafValues>
741
+            -0.4993278682231903 0.3665378093719482</leafValues></_></weakClassifiers></_>
742
+    <!-- stage 15 -->
743
+    <_>
744
+      <maxWeakCount>9</maxWeakCount>
745
+      <stageThreshold>-1.1974394321441650</stageThreshold>
746
+      <weakClassifiers>
747
+        <!-- tree 0 -->
748
+        <_>
749
+          <internalNodes>
750
+            0 -1 43 -5505 2147462911 2143265466 -4511070 -16450 -257
751
+            -201348440 -71333206</internalNodes>
752
+          <leafValues>
753
+            -0.3310225307941437 0.5624626278877258</leafValues></_>
754
+        <!-- tree 1 -->
755
+        <_>
756
+          <internalNodes>
757
+            0 -1 90 -136842268 -499330741 2015250980 -87107126
758
+            -641665744 -788524639 -1147864792 -134892563</internalNodes>
759
+          <leafValues>
760
+            -0.5266560912132263 0.3704403042793274</leafValues></_>
761
+        <!-- tree 2 -->
762
+        <_>
763
+          <internalNodes>
764
+            0 -1 104 -146800880 -1780368555 2111170033 -140904684
765
+            -16777551 -1946681885 -1646463595 -839131947</internalNodes>
766
+          <leafValues>
767
+            -0.4171888828277588 0.4540435671806335</leafValues></_>
768
+        <!-- tree 3 -->
769
+        <_>
770
+          <internalNodes>
771
+            0 -1 85 -832054034 -981663763 -301990281 -578814081
772
+            -932319000 -1997406723 -33555201 -69206017</internalNodes>
773
+          <leafValues>
774
+            -0.4556705355644226 0.3704262077808380</leafValues></_>
775
+        <!-- tree 4 -->
776
+        <_>
777
+          <internalNodes>
778
+            0 -1 24 -118492417 -1209026825 1119023838 -1334313353
779
+            1112948738 -297319313 1378887291 -139469193</internalNodes>
780
+          <leafValues>
781
+            -0.4182529747486115 0.4267231225967407</leafValues></_>
782
+        <!-- tree 5 -->
783
+        <_>
784
+          <internalNodes>
785
+            0 -1 78 -1714382628 -2353704 -112094959 -549613092
786
+            -1567058760 -1718550464 -342315012 -1074972227</internalNodes>
787
+          <leafValues>
788
+            -0.3625369668006897 0.4684656262397766</leafValues></_>
789
+        <!-- tree 6 -->
790
+        <_>
791
+          <internalNodes>
792
+            0 -1 5 -85219702 316836394 -33279 1904970288 2117267315
793
+            -260901769 -621461759 -88607770</internalNodes>
794
+          <leafValues>
795
+            -0.4742925167083740 0.3689507246017456</leafValues></_>
796
+        <!-- tree 7 -->
797
+        <_>
798
+          <internalNodes>
799
+            0 -1 11 -294654041 -353603585 -1641159686 -50331921
800
+            -2080899877 1145569279 -143132713 -152044037</internalNodes>
801
+          <leafValues>
802
+            -0.3666271567344666 0.4580127298831940</leafValues></_>
803
+        <!-- tree 8 -->
804
+        <_>
805
+          <internalNodes>
806
+            0 -1 32 1887453658 -638545712 -1877976819 -34320972
807
+            -1071067983 -661345416 -583338277 1060190561</internalNodes>
808
+          <leafValues>
809
+            -0.4567637443542481 0.3894708156585693</leafValues></_></weakClassifiers></_>
810
+    <!-- stage 16 -->
811
+    <_>
812
+      <maxWeakCount>9</maxWeakCount>
813
+      <stageThreshold>-0.5733128190040588</stageThreshold>
814
+      <weakClassifiers>
815
+        <!-- tree 0 -->
816
+        <_>
817
+          <internalNodes>
818
+            0 -1 122 -994063296 1088745462 -318837116 -319881377
819
+            1102566613 1165490103 -121679694 -134744129</internalNodes>
820
+          <leafValues>
821
+            -0.4055117964744568 0.5487945079803467</leafValues></_>
822
+        <!-- tree 1 -->
823
+        <_>
824
+          <internalNodes>
825
+            0 -1 68 -285233233 -538992907 1811935199 -369234005 -529
826
+            -20593 -20505 -1561401854</internalNodes>
827
+          <leafValues>
828
+            -0.3787897229194641 0.4532003402709961</leafValues></_>
829
+        <!-- tree 2 -->
830
+        <_>
831
+          <internalNodes>
832
+            0 -1 58 -1335245632 1968917183 1940861695 536816369
833
+            -1226071367 -570908176 457026619 1000020667</internalNodes>
834
+          <leafValues>
835
+            -0.4258328974246979 0.4202791750431061</leafValues></_>
836
+        <!-- tree 3 -->
837
+        <_>
838
+          <internalNodes>
839
+            0 -1 94 -1360318719 -1979797897 -50435249 -18646473
840
+            -608879292 -805306691 -269304244 -17840167</internalNodes>
841
+          <leafValues>
842
+            -0.4561023116111755 0.4002747833728790</leafValues></_>
843
+        <!-- tree 4 -->
844
+        <_>
845
+          <internalNodes>
846
+            0 -1 87 2062765935 -16449 -1275080721 -16406 45764335
847
+            -1090552065 -772846337 -570464322</internalNodes>
848
+          <leafValues>
849
+            -0.4314672648906708 0.4086346626281738</leafValues></_>
850
+        <!-- tree 5 -->
851
+        <_>
852
+          <internalNodes>
853
+            0 -1 127 -536896021 1080817663 -738234288 -965478709
854
+            -2082767969 1290855887 1993822934 -990381609</internalNodes>
855
+          <leafValues>
856
+            -0.4174543321132660 0.4249868988990784</leafValues></_>
857
+        <!-- tree 6 -->
858
+        <_>
859
+          <internalNodes>
860
+            0 -1 3 -818943025 168730891 -293610428 -79249354 669224671
861
+            621166734 1086506807 1473768907</internalNodes>
862
+          <leafValues>
863
+            -0.4321364760398865 0.4090838730335236</leafValues></_>
864
+        <!-- tree 7 -->
865
+        <_>
866
+          <internalNodes>
867
+            0 -1 79 -68895696 -67107736 -1414315879 -841676168
868
+            -619843344 -1180610531 -1081990469 1043203389</internalNodes>
869
+          <leafValues>
870
+            -0.5018386244773865 0.3702533841133118</leafValues></_>
871
+        <!-- tree 8 -->
872
+        <_>
873
+          <internalNodes>
874
+            0 -1 116 -54002134 -543485719 -2124882422 -1437445858
875
+            -115617074 -1195787391 -1096024366 -2140472445</internalNodes>
876
+          <leafValues>
877
+            -0.5037505626678467 0.3564981222152710</leafValues></_></weakClassifiers></_>
878
+    <!-- stage 17 -->
879
+    <_>
880
+      <maxWeakCount>9</maxWeakCount>
881
+      <stageThreshold>-0.4892596900463104</stageThreshold>
882
+      <weakClassifiers>
883
+        <!-- tree 0 -->
884
+        <_>
885
+          <internalNodes>
886
+            0 -1 132 -67113211 2003808111 1862135111 846461923 -2752
887
+            2002237273 -273154752 1937223539</internalNodes>
888
+          <leafValues>
889
+            -0.2448196411132813 0.5689709186553955</leafValues></_>
890
+        <!-- tree 1 -->
891
+        <_>
892
+          <internalNodes>
893
+            0 -1 62 1179423888 -78064940 -611839555 -539167899
894
+            -1289358360 -1650810108 -892540499 -1432827684</internalNodes>
895
+          <leafValues>
896
+            -0.4633283913135529 0.3587929606437683</leafValues></_>
897
+        <!-- tree 2 -->
898
+        <_>
899
+          <internalNodes>
900
+            0 -1 23 -285212705 -78450761 -656212031 -264050110 -27787425
901
+            -1334349961 -547662981 -135796924</internalNodes>
902
+          <leafValues>
903
+            -0.3731099069118500 0.4290455579757690</leafValues></_>
904
+        <!-- tree 3 -->
905
+        <_>
906
+          <internalNodes>
907
+            0 -1 77 341863476 403702016 -550588417 1600194541
908
+            -1080690735 951127993 -1388580949 -1153717473</internalNodes>
909
+          <leafValues>
910
+            -0.3658909499645233 0.4556473195552826</leafValues></_>
911
+        <!-- tree 4 -->
912
+        <_>
913
+          <internalNodes>
914
+            0 -1 22 -586880702 -204831512 -100644596 -39319550
915
+            -1191150794 705692513 457203315 -75806957</internalNodes>
916
+          <leafValues>
917
+            -0.5214384198188782 0.3221037387847900</leafValues></_>
918
+        <!-- tree 5 -->
919
+        <_>
920
+          <internalNodes>
921
+            0 -1 72 -416546870 545911370 -673716192 -775559454
922
+            -264113598 139424 -183369982 -204474641</internalNodes>
923
+          <leafValues>
924
+            -0.4289036989212036 0.4004956185817719</leafValues></_>
925
+        <!-- tree 6 -->
926
+        <_>
927
+          <internalNodes>
928
+            0 -1 50 -1026505020 -589692154 -1740499937 -1563770497
929
+            1348491006 -60710713 -1109853489 -633909413</internalNodes>
930
+          <leafValues>
931
+            -0.4621542394161224 0.3832748532295227</leafValues></_>
932
+        <!-- tree 7 -->
933
+        <_>
934
+          <internalNodes>
935
+            0 -1 108 -1448872304 -477895040 -1778390608 -772418127
936
+            -1789923416 -1612057181 -805306693 -1415842113</internalNodes>
937
+          <leafValues>
938
+            -0.3711548447608948 0.4612701535224915</leafValues></_>
939
+        <!-- tree 8 -->
940
+        <_>
941
+          <internalNodes>
942
+            0 -1 92 407905424 -582449988 52654751 -1294472 -285103725
943
+            -74633006 1871559083 1057955850</internalNodes>
944
+          <leafValues>
945
+            -0.5180652141571045 0.3205870389938355</leafValues></_></weakClassifiers></_>
946
+    <!-- stage 18 -->
947
+    <_>
948
+      <maxWeakCount>10</maxWeakCount>
949
+      <stageThreshold>-0.5911940932273865</stageThreshold>
950
+      <weakClassifiers>
951
+        <!-- tree 0 -->
952
+        <_>
953
+          <internalNodes>
954
+            0 -1 81 4112 -1259563825 -846671428 -100902460 1838164148
955
+            -74153752 -90653988 -1074263896</internalNodes>
956
+          <leafValues>
957
+            -0.2592592537403107 0.5873016119003296</leafValues></_>
958
+        <!-- tree 1 -->
959
+        <_>
960
+          <internalNodes>
961
+            0 -1 1 -285216785 -823206977 -1085589 -1081346 1207959293
962
+            1157103471 2097133565 -2097169</internalNodes>
963
+          <leafValues>
964
+            -0.3801195919513702 0.4718827307224274</leafValues></_>
965
+        <!-- tree 2 -->
966
+        <_>
967
+          <internalNodes>
968
+            0 -1 121 -12465 -536875169 2147478367 2130706303 -37765492
969
+            -866124467 -318782328 -1392509185</internalNodes>
970
+          <leafValues>
971
+            -0.3509117066860199 0.5094807147979736</leafValues></_>
972
+        <!-- tree 3 -->
973
+        <_>
974
+          <internalNodes>
975
+            0 -1 38 2147449663 -20741 -16794757 1945873146 -16710 -1
976
+            -8406341 -67663041</internalNodes>
977
+          <leafValues>
978
+            -0.4068757295608521 0.4130136370658875</leafValues></_>
979
+        <!-- tree 4 -->
980
+        <_>
981
+          <internalNodes>
982
+            0 -1 17 -155191713 866117231 1651407483 548272812 -479201468
983
+            -447742449 1354229504 -261884429</internalNodes>
984
+          <leafValues>
985
+            -0.4557141065597534 0.3539792001247406</leafValues></_>
986
+        <!-- tree 5 -->
987
+        <_>
988
+          <internalNodes>
989
+            0 -1 100 -225319378 -251682065 -492783986 -792341777
990
+            -1287261695 1393643841 -11274182 -213909521</internalNodes>
991
+          <leafValues>
992
+            -0.4117803275585175 0.4118592441082001</leafValues></_>
993
+        <!-- tree 6 -->
994
+        <_>
995
+          <internalNodes>
996
+            0 -1 63 -382220122 -2002072729 -51404800 -371201558
997
+            -923011069 -2135301457 -2066104743 -1042557441</internalNodes>
998
+          <leafValues>
999
+            -0.4008397758007050 0.4034757018089294</leafValues></_>
1000
+        <!-- tree 7 -->
1001
+        <_>
1002
+          <internalNodes>
1003
+            0 -1 101 -627353764 -48295149 1581203952 -436258614
1004
+            -105268268 -1435893445 -638126888 -1061107126</internalNodes>
1005
+          <leafValues>
1006
+            -0.5694189667701721 0.2964762747287750</leafValues></_>
1007
+        <!-- tree 8 -->
1008
+        <_>
1009
+          <internalNodes>
1010
+            0 -1 118 -8399181 1058107691 -621022752 -251003468 -12582915
1011
+            -574619739 -994397789 -1648362021</internalNodes>
1012
+          <leafValues>
1013
+            -0.3195341229438782 0.5294018983840942</leafValues></_>
1014
+        <!-- tree 9 -->
1015
+        <_>
1016
+          <internalNodes>
1017
+            0 -1 92 -348343812 -1078389516 1717960437 364735981
1018
+            -1783841602 -4883137 -457572354 -1076950384</internalNodes>
1019
+          <leafValues>
1020
+            -0.3365339040756226 0.5067458748817444</leafValues></_></weakClassifiers></_>
1021
+    <!-- stage 19 -->
1022
+    <_>
1023
+      <maxWeakCount>10</maxWeakCount>
1024
+      <stageThreshold>-0.7612916231155396</stageThreshold>
1025
+      <weakClassifiers>
1026
+        <!-- tree 0 -->
1027
+        <_>
1028
+          <internalNodes>
1029
+            0 -1 10 -1976661318 -287957604 -1659497122 -782068 43591089
1030
+            -453637880 1435470000 -1077438561</internalNodes>
1031
+          <leafValues>
1032
+            -0.4204545319080353 0.5165745615959168</leafValues></_>
1033
+        <!-- tree 1 -->
1034
+        <_>
1035
+          <internalNodes>
1036
+            0 -1 131 -67110925 14874979 -142633168 -1338923040
1037
+            2046713291 -2067933195 1473503712 -789579837</internalNodes>
1038
+          <leafValues>
1039
+            -0.3762553930282593 0.4075302779674530</leafValues></_>
1040
+        <!-- tree 2 -->
1041
+        <_>
1042
+          <internalNodes>
1043
+            0 -1 83 -272814301 -1577073 -1118685 -305156120 -1052289
1044
+            -1073813756 -538971154 -355523038</internalNodes>
1045
+          <leafValues>
1046
+            -0.4253497421741486 0.3728055357933044</leafValues></_>
1047
+        <!-- tree 3 -->
1048
+        <_>
1049
+          <internalNodes>
1050
+            0 -1 135 -2233 -214486242 -538514758 573747007 -159390971
1051
+            1994225489 -973738098 -203424005</internalNodes>
1052
+          <leafValues>
1053
+            -0.3601998090744019 0.4563256204128265</leafValues></_>
1054
+        <!-- tree 4 -->
1055
+        <_>
1056
+          <internalNodes>
1057
+            0 -1 115 -261031688 -1330369299 -641860609 1029570301
1058
+            -1306461192 -1196149518 -1529767778 683139823</internalNodes>
1059
+          <leafValues>
1060
+            -0.4034293889999390 0.4160816967487335</leafValues></_>
1061
+        <!-- tree 5 -->
1062
+        <_>
1063
+          <internalNodes>
1064
+            0 -1 64 -572993608 -34042628 -417865 -111109 -1433365268
1065
+            -19869715 -1920939864 -1279457063</internalNodes>
1066
+          <leafValues>
1067
+            -0.3620899617671967 0.4594142735004425</leafValues></_>
1068
+        <!-- tree 6 -->
1069
+        <_>
1070
+          <internalNodes>
1071
+            0 -1 36 -626275097 -615256993 1651946018 805366393
1072
+            2016559730 -430780849 -799868165 -16580645</internalNodes>
1073
+          <leafValues>
1074
+            -0.3903816640377045 0.4381459355354309</leafValues></_>
1075
+        <!-- tree 7 -->
1076
+        <_>
1077
+          <internalNodes>
1078
+            0 -1 93 1354797300 -1090957603 1976418270 -1342502178
1079
+            -1851873892 -1194637077 -1153521668 -1108399474</internalNodes>
1080
+          <leafValues>
1081
+            -0.3591445386409760 0.4624078869819641</leafValues></_>
1082
+        <!-- tree 8 -->
1083
+        <_>
1084
+          <internalNodes>
1085
+            0 -1 91 68157712 1211368313 -304759523 1063017136 798797750
1086
+            -275513546 648167355 -1145357350</internalNodes>
1087
+          <leafValues>
1088
+            -0.4297670423984528 0.4023293554782867</leafValues></_>
1089
+        <!-- tree 9 -->
1090
+        <_>
1091
+          <internalNodes>
1092
+            0 -1 107 -546318240 -1628569602 -163577944 -537002306
1093
+            -545456389 -1325465645 -380446736 -1058473386</internalNodes>
1094
+          <leafValues>
1095
+            -0.5727006793022156 0.2995934784412384</leafValues></_></weakClassifiers></_></stages>
1096
+  <features>
1097
+    <_>
1098
+      <rect>
1099
+        0 0 3 5</rect></_>
1100
+    <_>
1101
+      <rect>
1102
+        0 0 4 2</rect></_>
1103
+    <_>
1104
+      <rect>
1105
+        0 0 6 3</rect></_>
1106
+    <_>
1107
+      <rect>
1108
+        0 1 2 3</rect></_>
1109
+    <_>
1110
+      <rect>
1111
+        0 1 3 3</rect></_>
1112
+    <_>
1113
+      <rect>
1114
+        0 1 3 7</rect></_>
1115
+    <_>
1116
+      <rect>
1117
+        0 4 3 3</rect></_>
1118
+    <_>
1119
+      <rect>
1120
+        0 11 3 4</rect></_>
1121
+    <_>
1122
+      <rect>
1123
+        0 12 8 4</rect></_>
1124
+    <_>
1125
+      <rect>
1126
+        0 14 4 3</rect></_>
1127
+    <_>
1128
+      <rect>
1129
+        1 0 5 3</rect></_>
1130
+    <_>
1131
+      <rect>
1132
+        1 1 2 2</rect></_>
1133
+    <_>
1134
+      <rect>
1135
+        1 3 3 1</rect></_>
1136
+    <_>
1137
+      <rect>
1138
+        1 7 4 4</rect></_>
1139
+    <_>
1140
+      <rect>
1141
+        1 12 2 2</rect></_>
1142
+    <_>
1143
+      <rect>
1144
+        1 13 4 1</rect></_>
1145
+    <_>
1146
+      <rect>
1147
+        1 14 4 3</rect></_>
1148
+    <_>
1149
+      <rect>
1150
+        1 17 3 2</rect></_>
1151
+    <_>
1152
+      <rect>
1153
+        2 0 2 3</rect></_>
1154
+    <_>
1155
+      <rect>
1156
+        2 1 2 2</rect></_>
1157
+    <_>
1158
+      <rect>
1159
+        2 2 4 6</rect></_>
1160
+    <_>
1161
+      <rect>
1162
+        2 3 4 4</rect></_>
1163
+    <_>
1164
+      <rect>
1165
+        2 7 2 1</rect></_>
1166
+    <_>
1167
+      <rect>
1168
+        2 11 2 3</rect></_>
1169
+    <_>
1170
+      <rect>
1171
+        2 17 3 2</rect></_>
1172
+    <_>
1173
+      <rect>
1174
+        3 0 2 2</rect></_>
1175
+    <_>
1176
+      <rect>
1177
+        3 1 7 3</rect></_>
1178
+    <_>
1179
+      <rect>
1180
+        3 7 2 1</rect></_>
1181
+    <_>
1182
+      <rect>
1183
+        3 7 2 4</rect></_>
1184
+    <_>
1185
+      <rect>
1186
+        3 18 2 2</rect></_>
1187
+    <_>
1188
+      <rect>
1189
+        4 0 2 3</rect></_>
1190
+    <_>
1191
+      <rect>
1192
+        4 3 2 1</rect></_>
1193
+    <_>
1194
+      <rect>
1195
+        4 6 2 1</rect></_>
1196
+    <_>
1197
+      <rect>
1198
+        4 6 2 5</rect></_>
1199
+    <_>
1200
+      <rect>
1201
+        4 7 5 2</rect></_>
1202
+    <_>
1203
+      <rect>
1204
+        4 8 4 3</rect></_>
1205
+    <_>
1206
+      <rect>
1207
+        4 18 2 2</rect></_>
1208
+    <_>
1209
+      <rect>
1210
+        5 0 2 2</rect></_>
1211
+    <_>
1212
+      <rect>
1213
+        5 3 4 4</rect></_>
1214
+    <_>
1215
+      <rect>
1216
+        5 6 2 5</rect></_>
1217
+    <_>
1218
+      <rect>
1219
+        5 9 2 2</rect></_>
1220
+    <_>
1221
+      <rect>
1222
+        5 10 2 2</rect></_>
1223
+    <_>
1224
+      <rect>
1225
+        6 3 4 4</rect></_>
1226
+    <_>
1227
+      <rect>
1228
+        6 4 4 3</rect></_>
1229
+    <_>
1230
+      <rect>
1231
+        6 5 2 3</rect></_>
1232
+    <_>
1233
+      <rect>
1234
+        6 5 2 5</rect></_>
1235
+    <_>
1236
+      <rect>
1237
+        6 5 4 3</rect></_>
1238
+    <_>
1239
+      <rect>
1240
+        6 6 4 2</rect></_>
1241
+    <_>
1242
+      <rect>
1243
+        6 6 4 4</rect></_>
1244
+    <_>
1245
+      <rect>
1246
+        6 18 1 2</rect></_>
1247
+    <_>
1248
+      <rect>
1249
+        6 21 2 1</rect></_>
1250
+    <_>
1251
+      <rect>
1252
+        7 0 3 7</rect></_>
1253
+    <_>
1254
+      <rect>
1255
+        7 4 2 3</rect></_>
1256
+    <_>
1257
+      <rect>
1258
+        7 9 5 1</rect></_>
1259
+    <_>
1260
+      <rect>
1261
+        7 21 2 1</rect></_>
1262
+    <_>
1263
+      <rect>
1264
+        8 0 1 4</rect></_>
1265
+    <_>
1266
+      <rect>
1267
+        8 5 2 2</rect></_>
1268
+    <_>
1269
+      <rect>
1270
+        8 5 3 2</rect></_>
1271
+    <_>
1272
+      <rect>
1273
+        8 17 3 1</rect></_>
1274
+    <_>
1275
+      <rect>
1276
+        8 18 1 2</rect></_>
1277
+    <_>
1278
+      <rect>
1279
+        9 0 5 3</rect></_>
1280
+    <_>
1281
+      <rect>
1282
+        9 2 2 6</rect></_>
1283
+    <_>
1284
+      <rect>
1285
+        9 5 1 1</rect></_>
1286
+    <_>
1287
+      <rect>
1288
+        9 11 1 1</rect></_>
1289
+    <_>
1290
+      <rect>
1291
+        9 16 1 1</rect></_>
1292
+    <_>
1293
+      <rect>
1294
+        9 16 2 1</rect></_>
1295
+    <_>
1296
+      <rect>
1297
+        9 17 1 1</rect></_>
1298
+    <_>
1299
+      <rect>
1300
+        9 18 1 1</rect></_>
1301
+    <_>
1302
+      <rect>
1303
+        10 5 1 2</rect></_>
1304
+    <_>
1305
+      <rect>
1306
+        10 5 3 3</rect></_>
1307
+    <_>
1308
+      <rect>
1309
+        10 7 1 5</rect></_>
1310
+    <_>
1311
+      <rect>
1312
+        10 8 1 1</rect></_>
1313
+    <_>
1314
+      <rect>
1315
+        10 9 1 1</rect></_>
1316
+    <_>
1317
+      <rect>
1318
+        10 10 1 1</rect></_>
1319
+    <_>
1320
+      <rect>
1321
+        10 10 1 2</rect></_>
1322
+    <_>
1323
+      <rect>
1324
+        10 14 3 3</rect></_>
1325
+    <_>
1326
+      <rect>
1327
+        10 15 1 1</rect></_>
1328
+    <_>
1329
+      <rect>
1330
+        10 15 2 1</rect></_>
1331
+    <_>
1332
+      <rect>
1333
+        10 16 1 1</rect></_>
1334
+    <_>
1335
+      <rect>
1336
+        10 16 2 1</rect></_>
1337
+    <_>
1338
+      <rect>
1339
+        10 17 1 1</rect></_>
1340
+    <_>
1341
+      <rect>
1342
+        10 21 1 1</rect></_>
1343
+    <_>
1344
+      <rect>
1345
+        11 3 2 2</rect></_>
1346
+    <_>
1347
+      <rect>
1348
+        11 5 1 2</rect></_>
1349
+    <_>
1350
+      <rect>
1351
+        11 5 3 3</rect></_>
1352
+    <_>
1353
+      <rect>
1354
+        11 5 4 6</rect></_>
1355
+    <_>
1356
+      <rect>
1357
+        11 6 1 1</rect></_>
1358
+    <_>
1359
+      <rect>
1360
+        11 7 2 2</rect></_>
1361
+    <_>
1362
+      <rect>
1363
+        11 8 1 2</rect></_>
1364
+    <_>
1365
+      <rect>
1366
+        11 10 1 1</rect></_>
1367
+    <_>
1368
+      <rect>
1369
+        11 10 1 2</rect></_>
1370
+    <_>
1371
+      <rect>
1372
+        11 15 1 1</rect></_>
1373
+    <_>
1374
+      <rect>
1375
+        11 17 1 1</rect></_>
1376
+    <_>
1377
+      <rect>
1378
+        11 18 1 1</rect></_>
1379
+    <_>
1380
+      <rect>
1381
+        12 0 2 2</rect></_>
1382
+    <_>
1383
+      <rect>
1384
+        12 1 2 5</rect></_>
1385
+    <_>
1386
+      <rect>
1387
+        12 2 4 1</rect></_>
1388
+    <_>
1389
+      <rect>
1390
+        12 3 1 3</rect></_>
1391
+    <_>
1392
+      <rect>
1393
+        12 7 3 4</rect></_>
1394
+    <_>
1395
+      <rect>
1396
+        12 10 3 2</rect></_>
1397
+    <_>
1398
+      <rect>
1399
+        12 11 1 1</rect></_>
1400
+    <_>
1401
+      <rect>
1402
+        12 12 3 2</rect></_>
1403
+    <_>
1404
+      <rect>
1405
+        12 14 4 3</rect></_>
1406
+    <_>
1407
+      <rect>
1408
+        12 17 1 1</rect></_>
1409
+    <_>
1410
+      <rect>
1411
+        12 21 2 1</rect></_>
1412
+    <_>
1413
+      <rect>
1414
+        13 6 2 5</rect></_>
1415
+    <_>
1416
+      <rect>
1417
+        13 7 3 5</rect></_>
1418
+    <_>
1419
+      <rect>
1420
+        13 11 3 2</rect></_>
1421
+    <_>
1422
+      <rect>
1423
+        13 17 2 2</rect></_>
1424
+    <_>
1425
+      <rect>
1426
+        13 17 3 2</rect></_>
1427
+    <_>
1428
+      <rect>
1429
+        13 18 1 2</rect></_>
1430
+    <_>
1431
+      <rect>
1432
+        13 18 2 2</rect></_>
1433
+    <_>
1434
+      <rect>
1435
+        14 0 2 2</rect></_>
1436
+    <_>
1437
+      <rect>
1438
+        14 1 1 3</rect></_>
1439
+    <_>
1440
+      <rect>
1441
+        14 2 3 2</rect></_>
1442
+    <_>
1443
+      <rect>
1444
+        14 7 2 1</rect></_>
1445
+    <_>
1446
+      <rect>
1447
+        14 13 2 1</rect></_>
1448
+    <_>
1449
+      <rect>
1450
+        14 13 3 3</rect></_>
1451
+    <_>
1452
+      <rect>
1453
+        14 17 2 2</rect></_>
1454
+    <_>
1455
+      <rect>
1456
+        15 0 2 2</rect></_>
1457
+    <_>
1458
+      <rect>
1459
+        15 0 2 3</rect></_>
1460
+    <_>
1461
+      <rect>
1462
+        15 4 3 2</rect></_>
1463
+    <_>
1464
+      <rect>
1465
+        15 4 3 6</rect></_>
1466
+    <_>
1467
+      <rect>
1468
+        15 6 3 2</rect></_>
1469
+    <_>
1470
+      <rect>
1471
+        15 11 3 4</rect></_>
1472
+    <_>
1473
+      <rect>
1474
+        15 13 3 2</rect></_>
1475
+    <_>
1476
+      <rect>
1477
+        15 17 2 2</rect></_>
1478
+    <_>
1479
+      <rect>
1480
+        15 17 3 2</rect></_>
1481
+    <_>
1482
+      <rect>
1483
+        16 1 2 3</rect></_>
1484
+    <_>
1485
+      <rect>
1486
+        16 3 2 4</rect></_>
1487
+    <_>
1488
+      <rect>
1489
+        16 6 1 1</rect></_>
1490
+    <_>
1491
+      <rect>
1492
+        16 16 2 2</rect></_>
1493
+    <_>
1494
+      <rect>
1495
+        17 1 2 2</rect></_>
1496
+    <_>
1497
+      <rect>
1498
+        17 1 2 5</rect></_>
1499
+    <_>
1500
+      <rect>
1501
+        17 12 2 2</rect></_>
1502
+    <_>
1503
+      <rect>
1504
+        18 0 2 2</rect></_></features></cascade>
1505
+</opencv_storage>