No output from the Kalman filter in 2D object tracking

I am trying to apply a Kalman filter with OpenCV in Python to track the position of a ball. I can already detect it, but there is still noise I want to eliminate. I measure two variables - the x and y position - and I would like to obtain four - the x and y position and the x and y velocity - but I get none of them. When I display x0, y0, vy and vx on the screen, I get "[.0]".

Another problem is that I cannot pass the control matrix to the kalman.predict() function, because I get the following error:

OpenCV Error: Assertion failed (a_size.width == len) in gemm, file /tmp/opencv3-20170518-8732-1bjq2j7/opencv-3.2.0/modules/core/src/matmul.cpp, line 1537 
Traceback (most recent call last): 
File "kalman.py", line 128, in <module> 
kalmanout = kalman.predict(kalman.controlMatrix) 
cv2.error: /tmp/opencv3-20170518-8732-1bjq2j7/opencv-3.2.0/modules/core/src/matmul.cpp:1537: error: (-215) a_size.width == len in function gemm 
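
For context, a minimal sketch of how OpenCV's KalmanFilter expects the control argument to be shaped: predict() takes a control vector with as many rows as controlMatrix has columns (here a 1x1 vector), not the control matrix itself, which is what the assertion above checks. The acceleration value 9.81 below is only an illustrative placeholder, not something prescribed by the question.

import numpy as np
import cv2

dt = 0.01
kf = cv2.KalmanFilter(4, 2, 1)            # 4 states, 2 measurements, 1 control input
kf.measurementMatrix = np.array([[1, 0, 0, 0],
                                 [0, 1, 0, 0]], np.float32)
kf.transitionMatrix = np.array([[1, 0, dt, 0],
                                [0, 1, 0, dt],
                                [0, 0, 1, 0],
                                [0, 0, 0, 1]], np.float32)
# controlMatrix maps a single scalar input (an acceleration) into the state
kf.controlMatrix = np.array([[0],
                             [0.5 * dt ** 2],
                             [0],
                             [dt]], np.float32)

u = np.array([[9.81]], np.float32)        # 1x1 control vector, not the 4x1 matrix
prediction = kf.predict(u)                # this shape passes the gemm size check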

This is the piece of code I use for the Kalman filter (to apply the control matrix I use the line kalmanout = kalman.predict(kalman.controlMatrix) near the end):

# import the necessary packages 
from collections import deque 
import numpy as np 
import argparse 
import imutils 
import cv2 
import time 

# construct the argument parse and parse the arguments 
ap = argparse.ArgumentParser() 
ap.add_argument("-v", "--video", 
    help="path to the (optional) video file") 
ap.add_argument("-b", "--buffer", type=int, default=10, 
    help="max buffer size") 
ap.add_argument("-a", "--min-area", type=int, default=500, help="minimum area size") 

args = vars(ap.parse_args()) 
# define the lower and upper boundaries of the "blue" 
# ball in the HSV color space, then initialize the 
# list of tracked points 
greenLower = (48, 62, 88) 
greenUpper = (151, 238, 255) 
pts = deque(maxlen=args["buffer"]) 
tintervals = deque(maxlen=args["buffer"]) 
tPrev = 0 
pRad = 0 
mapix = 0 
mspeed = 0 

# if a video path was not supplied, grab the reference 
# to the webcam 
if not args.get("video", False): 
    camera = cv2.VideoCapture(0) 

# otherwise, grab a reference to the video file 
else: 
    camera = cv2.VideoCapture(args["video"]) 
    # keep looping 

#initialize background subtraction 
fgbg = cv2.createBackgroundSubtractorMOG2() 

while True: 
    # grab the current frame 
    (grabbed, frame) = camera.read() 
    displayx = 0 
    # start counting time 
    tPrev = time.time() 

    # if we are viewing a video and we did not grab a frame, 
    # then we have reached the end of the video 
    if args.get("video") and not grabbed: 
     break 

    # resize the frame and apply background subtraction 
    frame = imutils.resize(frame, width=500) 
    mask = fgbg.apply(frame) 
    res = cv2.bitwise_and(frame, frame, mask = mask) 

    # blur the frame and convert it to the HSV 
    blurred = cv2.GaussianBlur(res, (11, 11), 0) 
    hsv = cv2.cvtColor(res, cv2.COLOR_BGR2HSV) 

    # construct a mask for the color "blue", then perform 
    # a series of dilations and erosions to remove any small 
    # blobs left in the mask 

    mask = cv2.inRange(hsv, greenLower, greenUpper) 
    mask = cv2.erode(mask, None, iterations=2) 
    mask = cv2.dilate(mask, None, iterations=2) 

    # find contours in the mask and initialize the current 
    # (x, y) center of the ball 
    cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, 
     cv2.CHAIN_APPROX_SIMPLE)[-2] 
    center = None 

    # only proceed if at least one contour was found 
    if len(cnts) > 0: 
     # find the largest contour in the mask, then use 
     # it to compute the minimum enclosing circle and 
     # centroid 
     c = max(cnts, key=cv2.contourArea) 
     ((x, y), radius) = cv2.minEnclosingCircle(c) 
     pRad = radius 
     M = cv2.moments(c) 
     center = (int(M["m10"]/M["m00"]), int(M["m01"]/M["m00"])) 

     # only proceed if the radius meets a minimum size 
     if radius > 10: 
      # draw the circle and centroid on the frame, 
      # then update the list of tracked points 
      cv2.circle(frame, (int(x), int(y)), int(radius), 
       (0, 255, 255), 2) 
      cv2.circle(frame, center, 5, (0, 0, 255), -1) 

    # update time intervals queue 
    tintervals.appendleft(time.time() - tPrev) 

    # update the points queue 
    pts.appendleft(center) 

    # predict position of the ball 
    if (pRad > 0 and len(pts) > 5): 
     if pts[0] != None and pts[1] != None: 
      apix = 98.1/(0.032/pRad) 
      mapix = apix 

      y0 = pts[0][1] 
      x0 = pts[0][0] 

      kalmanin = np.zeros((2,1), np.float32) # measurement 
      kalmanout = np.zeros((4,1), np.float32) # tracked/prediction 

      kalmanin = np.array([[np.float32(x0)],[np.float32(y0)]]) 

      tkalman = 0.01 

      kalman = cv2.KalmanFilter(4,2) 
      kalman.measurementMatrix = np.array([[1,0,0,0],[0,1,0,0]],np.float32) 
      kalman.transitionMatrix = np.array([[1,0,tkalman,0],[0,1,0,tkalman],[0,0,1,0],[0,0,0,1]],np.float32) 
      kalman.controlMatrix = np.array([[0],[0.5*(tkalman**2.0)], [0],[tkalman]],np.float32) * mapix 
      kalman.processNoiseCov = np.array([[1,0,0,0],[0,1,0,0],[0,0,1,0],[0,0,0,1]],np.float32) * 0.03 
      kalman.measurementNoiseCov = np.array([[1,0],[0,1]],np.float32) * 0.00009 

      kalman.correct(kalmanin) 
      kalmanout = kalman.predict(kalman.controlMatrix) 

      x0 = kalmanout[0] 
      y0 = kalmanout[1] 
      vx = kalmanout[2] 
      vy = kalmanout[3] 

      displayx = x0 

      listX = [] 
      listY = [] 

      for i in range(1, 11): 
       t = 0.01 * i 
       y = y0 + vy * t + (apix * (t ** 2))/2 
       x = x0 + vx * t 
       listX.append(int(x)) 
       listY.append(int(y)) 
       mspeed = vy 

      for i in range(0, 9): 
       cv2.line(frame, (listX[i], listY[i]), (listX[i+1], listY[i+1]), (255, 0, 0), 4) 


    # loop over the set of tracked points 
    for i in xrange(1, len(pts)): 
     # if either of the tracked points are None, ignore 
     # them 
     if pts[i - 1] is None or pts[i] is None: 
      continue 

     # otherwise, compute the thickness of the line and 
     # draw the connecting lines 
     thickness = int(np.sqrt(args["buffer"]/float(i + 1)) * 2.5) 
     cv2.line(frame, pts[i - 1], pts[i], (0, 0, 255), thickness) 


    cv2.putText(frame, "y axis speed: {}".format(displayx), 
     (120, frame.shape[0] - 70), cv2.FONT_HERSHEY_SIMPLEX, 
     0.5, (0, 0, 255), 1) 


    cv2.putText(frame, "radius in px: {}".format(pRad), 
     (120, frame.shape[0] - 30), cv2.FONT_HERSHEY_SIMPLEX, 
     0.5, (0, 0, 255), 1) 

    cv2.putText(frame, "apix: {}".format(mapix), 
     (120, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 
     0.5, (0, 0, 255), 1) 

    if (mapix != 0): 
     cv2.putText(frame, "radius in meters: {}".format((9.81*pRad)/mapix), 
      (120, frame.shape[0] - 50), cv2.FONT_HERSHEY_SIMPLEX, 
      0.5, (0, 0, 255), 1) 

    # shows x, y position, (newest input from pts) 
    cv2.putText(frame, "x, y: {}".format(pts[0]), 
     (10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 
     0.35, (0, 0, 255), 1) 

    # show the frame to our screen 
    cv2.imshow("Frame", frame) 
    key = cv2.waitKey(1) & 0xFF 

    # if the 'q' key is pressed, stop the loop 
    if key == ord("q"): 
     break 

# cleanup the camera and close any open windows 
camera.release() 
cv2.destroyAllWindows() 

Answer


First of all, I suggest initializing the Kalman filter outside of the main loop. The main problem with the code is that you have defined the control matrix. If I understand your task correctly, you only observe the system, you do not control it. Simply skip the kalman.controlMatrix initialization, or set it to a zero matrix. Inside the loop it is then enough to use

kalmanout = kalman.predict() 
kalman.correct(kalmanin) 
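
To make the suggested structure concrete, here is a minimal sketch using the same state layout as the question (the noise values are copied from the question's own code, and kalman_step is a hypothetical helper name, not part of the answer):

import numpy as np
import cv2

dt = 0.01

# build the filter once, before the frame loop
kalman = cv2.KalmanFilter(4, 2)                     # state: x, y, vx, vy; measurement: x, y
kalman.measurementMatrix = np.array([[1, 0, 0, 0],
                                     [0, 1, 0, 0]], np.float32)
kalman.transitionMatrix = np.array([[1, 0, dt, 0],
                                    [0, 1, 0, dt],
                                    [0, 0, 1, 0],
                                    [0, 0, 0, 1]], np.float32)
kalman.processNoiseCov = np.eye(4, dtype=np.float32) * 0.03
kalman.measurementNoiseCov = np.eye(2, dtype=np.float32) * 0.00009
# no controlMatrix: the ball is only observed, not driven by a known input

def kalman_step(x_meas, y_meas):
    """Advance the filter by one frame and return the corrected state [x, y, vx, vy]."""
    kalman.predict()                                # prior estimate for this frame
    measurement = np.array([[np.float32(x_meas)],
                            [np.float32(y_meas)]])
    return kalman.correct(measurement)              # posterior after the new observation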

Actually, I do need to predict the trajectory. Moving the Kalman initialization outside the loop solves both problems; now I just need to tune the covariance. Thank you! – dastez00
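
For the covariance tuning mentioned above, one hedged starting point (the values are purely illustrative, not from the thread): shrinking processNoiseCov smooths the track but makes it react more slowly, while growing measurementNoiseCov makes the filter trust the colour detection less.

import numpy as np
import cv2

kalman = cv2.KalmanFilter(4, 2)
# smaller processNoiseCov -> smoother but slower-reacting track
kalman.processNoiseCov = np.eye(4, dtype=np.float32) * 1e-2
# larger measurementNoiseCov -> rely less on noisy detections
kalman.measurementNoiseCov = np.eye(2, dtype=np.float32) * 1e-1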