#!/usr/bin/python
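"""Webcam auto-locker: watch the camera for eyes with an OpenCV Haar cascade
and, once no eyes have been seen for about 12 seconds, lock the MATE
screensaver over D-Bus, warning first through desktop notifications."""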
import cv2
import time
import logging
import dbus
session_bus = dbus.SessionBus()
# Reuse one 32-bit notification id so repeated Notify() calls replace the same popup.
notif_id = id(__name__) % (2**32)
def get_dbus_object(dest, path, iface):
    proxy = session_bus.get_object(dest, path)
    return dbus.Interface(proxy, iface)
def notify(summary, body):
    notif = get_dbus_object("org.freedesktop.Notifications",
                            "/org/freedesktop/Notifications",
                            "org.freedesktop.Notifications")
    # Notify(app_name, replaces_id, app_icon, summary, body, actions, hints, expire_timeout)
    notif.Notify("autolock", notif_id, "", summary, body, [], {}, 1)
def close_notification():
    notif = get_dbus_object("org.freedesktop.Notifications",
                            "/org/freedesktop/Notifications",
                            "org.freedesktop.Notifications")
    notif.CloseNotification(notif_id)
def tryLock():
    """Lock the screen through mate-screensaver unless it is already locked."""
    try:
        screensaver = get_dbus_object("org.mate.ScreenSaver",
                                      "/org/mate/ScreenSaver",
                                      "org.mate.ScreenSaver")
        if screensaver.Hello() == "org.mate.ScreenSaver":
            if not screensaver.GetActive():
                logging.info("call Lock()")
                screensaver.Lock()
            else:
                logging.debug("Is locked already")
        else:
            logging.error("Mate-Screensaver doesn't answer our call")
    except dbus.DBusException:
        # Screensaver service not reachable; try again on the next timeout.
        logging.exception("Could not talk to mate-screensaver")
def detect(show_window=False):
    # Haar cascade for eye detection (path relative to the working directory).
    haar_cascade = cv2.CascadeClassifier("xml/haarcascade_eye.xml")
    capture = cv2.VideoCapture(0)
    last_seen = time.time()
    last_analysis = last_seen
    try:
        while True:
            # Keep grabbing frames so the capture buffer stays fresh,
            # but only decode and analyse one frame every 0.5 s.
            capture.grab()
            if time.time() - last_analysis < 0.5:
                continue
            ret, frame = capture.retrieve()
            last_analysis = time.time()
            if not ret:
                continue
            gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
            matches = haar_cascade.detectMultiScale(gray, 1.3, 5)
            if len(matches) >= 1:
                # Eyes visible: dismiss any pending warning and reset the timer.
                close_notification()
                last_seen = time.time()
            else:
                # Countdown: warn during the last 10 s, lock after 12 s unseen.
                timeout = (last_seen - time.time()) + 12
                if timeout <= 10:
                    notify("Autolocker:",
                           "Activate screenlock in {:.0f}s".format(timeout))
                if timeout <= 0:
                    tryLock()
            if show_window:
                gray = cv2.GaussianBlur(gray, (5, 5), 0)
                gray = cv2.adaptiveThreshold(gray, 255,
                                             cv2.ADAPTIVE_THRESH_GAUSSIAN_C,
                                             cv2.THRESH_BINARY, 11, 2)
                for (x, y, w, h) in matches:
                    cv2.rectangle(frame, (x, y), (x + w, y + h), (255, 0, 0), 2)
                    # roi = gray[y:(y+h), x:(x+w)]
                    # if not roi != []: continue
                    # print (x, y, w, h)
                    # det = cv2.SimpleBlobDetector()
                    # for blob in det.detect(roi):
                    #     cv2.circle(frame, (x+int(blob.pt[0]), y+int(blob.pt[1])), int(blob.size), (255,0,0))
                    # circles = cv2.HoughCircles(roi, cv2.cv.CV_HOUGH_GRADIENT, 1, 20,
                    #                            param1=40, param2=15, minRadius=4, maxRadius=min(w, h)/2)
                    # if circles != None and len(circles) > 0:
                    #     circles = np.uint16(np.around(circles))
                    #     for circle in circles[0]:
                    #         center = (x + int(circle[0]), y + int(circle[1]))
                    #         cv2.circle(frame, center, int(circle[2]), (0,255,255))
                cv2.imshow("img", frame)
                cv2.imshow("gray", gray)
                key = cv2.waitKey(1)
                # Quit on Ctrl+Q, Ctrl+W or Esc.
                ctrl_mask = 0x40000
                if key in [ctrl_mask | ord("q"), ctrl_mask | ord("w"), 27]:
                    break
            else:
                time.sleep(0.3)
    except KeyboardInterrupt:
        pass
    capture.release()
    if show_window:
        cv2.destroyAllWindows()
if __name__ == "__main__":
    detect(True)