-
Notifications
You must be signed in to change notification settings - Fork 4
/
person_detecetion_inVideo.py
56 lines (46 loc) · 1.71 KB
/
person_detecetion_inVideo.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
import cv2
import datetime
import imutils
import numpy as np

# Pre-trained MobileNet-SSD (Caffe) model files, expected alongside the script.
modelpath = "MobileNetSSD_deploy.caffemodel"
protopath = "MobileNetSSD_deploy.prototxt"
detector = cv2.dnn.readNetFromCaffe(prototxt=protopath, caffeModel=modelpath)

# Class labels the MobileNet-SSD network was trained on; a detection's
# class id indexes into this list.
CLASSES = ["background", "aeroplane", "bicycle", "bird", "boat",
           "bottle", "bus", "car", "cat", "chair", "cow", "diningtable",
           "dog", "horse", "motorbike", "person", "pottedplant", "sheep",
           "sofa", "train", "tvmonitor"]

cap = cv2.VideoCapture('bts.mp4')
fps_start_time = datetime.datetime.now()
fps = 0
total_frames = 0

while True:
    ret, frame = cap.read()
    # FIX: the original ignored `ret`, so imutils.resize(None, ...) crashed
    # with AttributeError when the video ended or a frame failed to decode.
    if not ret:
        break
    frame = imutils.resize(frame, width=600)
    total_frames = total_frames + 1

    (H, W) = frame.shape[:2]
    # FIX: blobFromImage's size parameter is (width, height); the original
    # passed (H, W) swapped. MobileNet-SSD is designed for a 300x300 input,
    # so resize to that fixed size before inference.
    blob = cv2.dnn.blobFromImage(frame, 0.007843, (300, 300), 127.5)
    detector.setInput(blob)
    person_detections = detector.forward()

    # Detections come back shaped [1, 1, N, 7]; each detection row is
    # [image_id, class_id, confidence, x1, y1, x2, y2] with normalized coords.
    for i in np.arange(0, person_detections.shape[2]):
        confidence = person_detections[0, 0, i, 2]
        if confidence > 0.5:
            idx = int(person_detections[0, 0, i, 1])
            if CLASSES[idx] != "person":
                continue
            # Scale the normalized box back to the resized frame's pixels.
            person_box = person_detections[0, 0, i, 3:7] * np.array([W, H, W, H])
            (startX, startY, endX, endY) = person_box.astype("int")
            cv2.rectangle(frame, (startX, startY), (endX, endY), (0, 255, 0), 2)

    # FIX: use total_seconds() — timedelta.seconds truncates to whole seconds
    # (and drops the days component), which made the FPS readout jumpy and
    # wrong; guard against a zero elapsed time on the very first frames.
    elapsed = (datetime.datetime.now() - fps_start_time).total_seconds()
    fps = 0.0 if elapsed == 0 else total_frames / elapsed
    fps_text = "FPS: {:.2f}".format(fps)
    cv2.putText(frame, fps_text, (5, 30), cv2.FONT_HERSHEY_COMPLEX, 1, (0, 0, 255), 1)

    cv2.imshow("APPLICATION", frame)
    # 'q' quits the viewer loop.
    key = cv2.waitKey(1)
    if key == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()