defiant.homedns.org Git - ros_wild_thumper.git/commitdiff
Added camshift test script
author Erik Andresen <erik@vontaene.de>
Sat, 25 Nov 2017 11:13:41 +0000 (12:13 +0100)
committer Erik Andresen <erik@vontaene.de>
Sat, 25 Nov 2017 11:13:41 +0000 (12:13 +0100)
scripts/camshift.py [new file with mode: 0755]

diff --git a/scripts/camshift.py b/scripts/camshift.py
new file mode 100755 (executable)
index 0000000..6c901ad
--- /dev/null
@@ -0,0 +1,124 @@
+#!/usr/bin/env python
+
+'''
+Camshift tracker
+================
+
+This is a demo that shows CamShift (mean-shift based) tracking.
+You select a colored object, such as your face, with the mouse and it tracks it.
+This ROS version reads images from the topic /usb_cam/image_raw instead of a video device (see the usage sketch below).
+
+http://www.robinhewitt.com/research/track/camshift.html
+
+Usage:
+------
+       camshift.py   (run as a ROS node; it subscribes to /usb_cam/image_raw)
+
+       To initialize tracking, select the object with the mouse.
+
+Keys:
+-----
+       ESC - exit
+       b   - toggle back-projected probability visualization
+'''
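+# Usage sketch (hedged): the ROS package name "wild_thumper" used below is only
+# an assumption, as is starting the camera through the usb_cam driver; neither
+# is part of this commit:
+#
+#   rosrun usb_cam usb_cam_node
+#   rosrun wild_thumper camshift.py
+#
+# The image topic can be remapped on the command line if the camera publishes
+# elsewhere, e.g.:
+#   rosrun wild_thumper camshift.py /usb_cam/image_raw:=/camera/image_raw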
+
+import rospy
+from sensor_msgs.msg import Image
+import numpy as np
+import cv2
+from cv_bridge import CvBridge, CvBridgeError
+
+class App:
+       def __init__(self):
+               rospy.init_node('camshift')
+               rospy.Subscriber("/usb_cam/image_raw", Image, self.cmdImageReceived)
+               self.bridge = CvBridge()
+
+               self.first_run = True
+               self.selection = None
+               self.drag_start = None
+               self.tracking_state = 0 # 0 = waiting for a selection, 1 = tracking
+               self.show_backproj = False
+       
+       def onmouse(self, event, x, y, flags, param):
+               x, y = np.int16([x, y]) # cast to int16; marked "BUG" in the upstream OpenCV sample
+               if event == cv2.EVENT_LBUTTONDOWN:
+                       self.drag_start = (x, y)
+                       self.tracking_state = 0
+               if self.drag_start:
+                       if event == cv2.EVENT_LBUTTONUP:
+                               self.drag_start = None
+                               if self.selection is not None:
+                                       self.tracking_state = 1
+                       elif flags & cv2.EVENT_FLAG_LBUTTON:
+                               h, w = self.frame.shape[:2]
+                               xo, yo = self.drag_start
+                               x0, y0 = np.maximum(0, np.minimum([xo, yo], [x, y]))
+                               x1, y1 = np.minimum([w, h], np.maximum([xo, yo], [x, y]))
+                               self.selection = None
+                               if x1-x0 > 0 and y1-y0 > 0:
+                                       self.selection = (x0, y0, x1, y1)
+
+       def show_hist(self):
+               bin_count = self.hist.shape[0]
+               bin_w = 24
+               img = np.zeros((256, bin_count*bin_w, 3), np.uint8)
+               for i in xrange(bin_count):
+                       h = int(self.hist[i])
+                       cv2.rectangle(img, (i*bin_w+2, 255), ((i+1)*bin_w-2, 255-h), (int(180.0*i/bin_count), 255, 255), -1)
+               img = cv2.cvtColor(img, cv2.COLOR_HSV2BGR)
+               cv2.imshow('hist', img)
+
+       def run(self):
+               rospy.spin()
+               cv2.destroyAllWindows()
+       
+       def cmdImageReceived(self, msg):
+               if self.first_run:
+                       cv2.namedWindow('camshift')
+                       cv2.namedWindow('hist')
+                       cv2.setMouseCallback('camshift', self.onmouse)
+                       self.first_run = False
+
+               self.frame = self.bridge.imgmsg_to_cv2(msg, desired_encoding="bgr8") # force BGR so the BGR2HSV conversion below is correct
+               vis = self.frame.copy()
+               hsv = cv2.cvtColor(self.frame, cv2.COLOR_BGR2HSV)
+               # ignore pixels with low saturation or value, where hue is unreliable
+               mask = cv2.inRange(hsv, np.array((0., 60., 32.)), np.array((180., 255., 255.)))
+
+               if self.selection:
+                       x0, y0, x1, y1 = self.selection
+                       self.track_window = (x0, y0, x1-x0, y1-y0)
+                       hsv_roi = hsv[y0:y1, x0:x1]
+                       mask_roi = mask[y0:y1, x0:x1]
+                       hist = cv2.calcHist([hsv_roi], [0], mask_roi, [16], [0, 180]) # hue histogram of the selection
+                       cv2.normalize(hist, hist, 0, 255, cv2.NORM_MINMAX)
+                       self.hist = hist.reshape(-1)
+                       self.show_hist()
+
+                       vis_roi = vis[y0:y1, x0:x1]
+                       cv2.bitwise_not(vis_roi, vis_roi)
+                       vis[mask == 0] = 0
+
+               if self.tracking_state == 1 and self.track_window != (0, 0, 0, 0):
+                       self.selection = None
+                       prob = cv2.calcBackProject([hsv], [0], self.hist, [0, 180], 1) # per-pixel likelihood of the selected hue histogram
+                       prob &= mask
+                       term_crit = ( cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 1 )
+                       track_box, self.track_window = cv2.CamShift(prob, self.track_window, term_crit)
+
+                       if self.show_backproj:
+                               vis[:] = prob[...,np.newaxis]
+                       try:
+                               cv2.ellipse(vis, track_box, (0, 0, 255), 2)
+                       except cv2.error:
+                               print track_box # drawing can fail on a degenerate (zero-size) box
+
+               cv2.imshow('camshift', vis)
+
+               ch = 0xFF & cv2.waitKey(5)
+               if ch == 27:
+                       rospy.signal_shutdown("Exit from keyboard")
+               if ch == ord('b'):
+                       self.show_backproj = not self.show_backproj
+
+
+if __name__ == '__main__':
+       App().run()
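
For reference, a minimal, self-contained sketch of the back-projection and CamShift step that cmdImageReceived performs above. It is not part of the commit: it runs on a synthetic frame, so neither a camera nor ROS is needed, and the hard-coded selection rectangle merely stands in for the mouse selection.

import numpy as np
import cv2

# Synthetic BGR frame with a single red square standing in for the tracked object
frame = np.zeros((240, 320, 3), np.uint8)
cv2.rectangle(frame, (100, 80), (160, 140), (0, 0, 255), -1)

hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
# Same saturation/value gate as the node: hue is meaningless for dark or grey pixels
mask = cv2.inRange(hsv, np.array((0., 60., 32.)), np.array((180., 255., 255.)))

x0, y0, x1, y1 = 100, 80, 160, 140  # stands in for the mouse selection
hist = cv2.calcHist([hsv[y0:y1, x0:x1]], [0], mask[y0:y1, x0:x1], [16], [0, 180])
cv2.normalize(hist, hist, 0, 255, cv2.NORM_MINMAX)

# Back-project the hue histogram and let CamShift fit a rotated box to it
prob = cv2.calcBackProject([hsv], [0], hist, [0, 180], 1)
prob &= mask
term_crit = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 1)
track_box, track_window = cv2.CamShift(prob, (x0, y0, x1 - x0, y1 - y0), term_crit)
print(track_box)  # rotated rect: ((cx, cy), (w, h), angle)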