Reputation: 207
How can I use a Kalman filter to track the movements of a person in a video in real time? I am new to Kalman filtering and have been experimenting with it. So far I have been able to run a Kalman filter and predict the path of a ball in a video.
Here's the code for background subtraction:
import cv2
import numpy as np
import matplotlib.pyplot as plt
file="singleball.mov"
capture = cv2.VideoCapture(file)
print "\t Width: ",capture.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH)
print "\t Height: ",capture.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT)
print "\t FourCC: ",capture.get(cv2.cv.CV_CAP_PROP_FOURCC)
print "\t Framerate: ",capture.get(cv2.cv.CV_CAP_PROP_FPS)
numframes = int(capture.get(7))  # frame count (CV_CAP_PROP_FRAME_COUNT)
print "\t Number of Frames: ",numframes
count=0
history = 10
nGauss = 3
bgThresh = 0.6
noise = 20
bgs = cv2.BackgroundSubtractorMOG(history,nGauss,bgThresh,noise)
plt.figure()
plt.hold(True)
plt.axis([0,480,360,0])
measuredTrack=np.zeros((numframes,2))-1
while count < numframes:
    count += 1
    img2 = capture.read()[1]
    cv2.imshow("Video", img2)
    foremat = bgs.apply(img2)
    cv2.waitKey(100)
    foremat = bgs.apply(img2)
    ret, thresh = cv2.threshold(foremat, 127, 255, 0)
    contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    if len(contours) > 0:
        # use the centroid of the first contour as the measured ball position
        m = np.mean(contours[0], axis=0)
        measuredTrack[count-1, :] = m[0]
        plt.plot(m[0,0], m[0,1], 'ob')
    cv2.imshow('Foreground', foremat)
    cv2.waitKey(80)
capture.release()
print measuredTrack
np.save("ballTrajectory", measuredTrack)
plt.show()
Here's the code for the constant-velocity Kalman filter:
import numpy as np
from pykalman import KalmanFilter
from matplotlib import pyplot as plt
Measured=np.load("ballTrajectory.npy")
# drop any leading frames where no measurement was recorded
while True:
    if Measured[0,0] == -1.:
        Measured = np.delete(Measured, 0, 0)
    else:
        break
numMeas=Measured.shape[0]
MarkedMeasure=np.ma.masked_less(Measured,0)
Transition_Matrix=[[1,0,1,0],[0,1,0,1],[0,0,1,0],[0,0,0,1]]
Observation_Matrix=[[1,0,0,0],[0,1,0,0]]
xinit=MarkedMeasure[0,0]
yinit=MarkedMeasure[0,1]
vxinit=MarkedMeasure[1,0]-MarkedMeasure[0,0]
vyinit=MarkedMeasure[1,1]-MarkedMeasure[0,1]
initstate=[xinit,yinit,vxinit,vyinit]
initcovariance=1.0e-3*np.eye(4)
transitionCov = 1.0e-4*np.eye(4)
observationCov = 1.0e-1*np.eye(2)
kf = KalmanFilter(transition_matrices=Transition_Matrix,
                  observation_matrices=Observation_Matrix,
                  initial_state_mean=initstate,
                  initial_state_covariance=initcovariance,
                  transition_covariance=transitionCov,
                  observation_covariance=observationCov)
(filtered_state_means, filtered_state_covariances) = kf.filter(MarkedMeasure)
plt.plot(MarkedMeasure[:,0],MarkedMeasure[:,1],'xr',label='measured')
plt.axis([0,520,360,0])
plt.hold(True)
plt.plot(filtered_state_means[:,0],filtered_state_means[:,1],'ob',label='kalman output')
plt.legend(loc=2)
plt.title("Constant Velocity Kalman Filter")
plt.show()
Link to the video I used: https://www.hdm-stuttgart.de/~maucher/Python/ComputerVision/html/files/singleball.mov
Now, the problem is that I am storing the trajectory in a file and then using that file as input to the Kalman filter. How can I extend this to work in real time? And how do I track a single person in a group where multiple people may be present and moving?
Python version: 2.7
OpenCV version: 2.4.13
Upvotes: 2
Views: 2110
Reputation: 5429
The code below shows an example of how to use the filter_update method to take a single frame from the video at a time and update the estimate of the state.

It's more or less based on the code you shared, except that I've used the kf.smooth method on the first half of the frames and then used filter_update to update the state (position) estimate for each subsequent frame. pykalman's smooth method operates on a batch of measurements and estimates the smoothed state means and covariances; the last smoothed state is then used to initialise the frame-by-frame updates.

I've also modified the plotting so that you can see the updated state estimate as the video plays.

You'll see that the constant-velocity Kalman filter does a reasonable job of estimating where the ball is underneath the box (and when it will appear again).
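In case it helps to see the moving parts in isolation before the full listing: the online phase is driven by a single kf.filter_update call per frame, which takes the previous state mean and covariance plus the new observation, and passing observation=None (or a masked value) makes it do a prediction-only step when the detector finds nothing. A minimal sketch with made-up measurements, not the full tracker:

import numpy as np
from pykalman import KalmanFilter

# Constant-velocity model: state = [x, y, vx, vy], observation = [x, y]
kf = KalmanFilter(transition_matrices=[[1,0,1,0],[0,1,0,1],[0,0,1,0],[0,0,0,1]],
                  observation_matrices=[[1,0,0,0],[0,1,0,0]])

x_now = np.array([0., 0., 1., 1.])   # some initial state estimate (placeholder values)
P_now = np.eye(4)                    # and its covariance

for z in ([1.1, 0.9], None, [3.2, 3.1]):      # None = detector missed the target this frame
    x_now, P_now = kf.filter_update(filtered_state_mean=x_now,
                                    filtered_state_covariance=P_now,
                                    observation=z)
    print x_now[:2]                  # current position estimate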
Code:
import cv2
import numpy as np
import matplotlib.pyplot as plt
from pykalman import KalmanFilter
# Main settings:
file="singleball.mov"
filter_train_ratio = 0.5
capture = cv2.VideoCapture(file)
numframes=int(capture.get(7))
numframes_train = int(filter_train_ratio*numframes)
print "\t Total No. Frames: ", numframes
print "\t No. Frames Train: ", numframes_train
# Background filter settings:
history = 10
nGauss = 3
bgThresh = 0.6
noise = 20
bgs = cv2.BackgroundSubtractorMOG(history,nGauss,bgThresh,noise)
f = plt.figure()
plt.ion()
plt.axis([0,480,360,0])
measuredTrack = np.zeros((numframes_train,2))-1
measurementMissingIdx = [False]*numframes_train
# Get measured trace to train a Kalman Filter:
count=0
legendPlotted = False
while count < numframes_train:
    count += 1
    img2 = capture.read()[1]
    cv2.imshow("Video", img2)
    foremat = bgs.apply(img2)
    cv2.waitKey(100)
    foremat = bgs.apply(img2)
    ret, thresh = cv2.threshold(foremat, 127, 255, 0)
    contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    if len(contours) > 0:
        m = np.mean(contours[0], axis=0)
        measuredTrack[count-1, :] = m[0]
        if not legendPlotted:
            plt.plot(m[0,0], m[0,1], 'ob', label='measurement')
            plt.legend(loc=2)
            legendPlotted = True
        else:
            plt.plot(m[0,0], m[0,1], 'ob')
        plt.pause(0.05)
    else:
        measurementMissingIdx[count-1] = True
    cv2.imshow('Foreground', foremat)
    cv2.waitKey(80)
# Train the Kalman filter:
measurements = np.ma.asarray(measuredTrack)
measurements[measurementMissingIdx] = np.ma.masked
# Kalman filter settings:
Transition_Matrix=[[1,0,1,0],[0,1,0,1],[0,0,1,0],[0,0,0,1]]
Observation_Matrix=[[1,0,0,0],[0,1,0,0]]
kf = KalmanFilter(transition_matrices=Transition_Matrix,
                  observation_matrices=Observation_Matrix)
(smoothed_state_means, smoothed_state_covariances) = kf.smooth(measurements)
plt.plot(smoothed_state_means[:,0],smoothed_state_means[:,1],'xr',label='kalman output')
legend = plt.legend(loc=2)
plt.title("Constant Velocity Kalman Filter")
# Apply (pre-trained) filter one interval at a time,
# with plotting in real time.
x_now = smoothed_state_means[-1, :]
P_now = smoothed_state_covariances[-1, :]
legendPlotted = False
while count < numframes:
    newMeasurement = np.ma.asarray(-1)
    count += 1
    img2 = capture.read()[1]
    cv2.imshow("Video", img2)
    foremat = bgs.apply(img2)
    cv2.waitKey(100)
    foremat = bgs.apply(img2)
    ret, thresh = cv2.threshold(foremat, 127, 255, 0)
    contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    if len(contours) > 0:
        m = np.mean(contours[0], axis=0)
        newMeasurement = np.ma.asarray(m[0])
    else:
        newMeasurement = np.ma.masked
    cv2.imshow('Foreground', foremat)
    cv2.waitKey(80)
    (x_now, P_now) = kf.filter_update(filtered_state_mean=x_now,
                                      filtered_state_covariance=P_now,
                                      observation=newMeasurement)
    if not legendPlotted:
        plt.plot(x_now[0], x_now[1], 'xg', label='kalman update')
        legendPlotted = True
        plt.legend(loc=2)
    else:
        plt.plot(x_now[0], x_now[1], 'xg')
    plt.pause(0.05)
f.savefig("so_42941634.pdf", bbox_inches='tight')
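As for making this genuinely real-time and removing the intermediate .npy file: the filter_update call can sit directly inside the capture loop, so each new frame produces a detection (or a miss) that immediately updates the state. A rough sketch along those lines, assuming a webcam at index 0, the same OpenCV 2.4 background subtractor as above, and hand-picked covariances in place of the smoothing phase (the initial state and tuning values here are placeholders you would want to adjust):

import cv2
import numpy as np
from pykalman import KalmanFilter

capture = cv2.VideoCapture(0)                        # live camera instead of a file
bgs = cv2.BackgroundSubtractorMOG(10, 3, 0.6, 20)    # history, nGauss, bgThresh, noise

kf = KalmanFilter(transition_matrices=[[1,0,1,0],[0,1,0,1],[0,0,1,0],[0,0,0,1]],
                  observation_matrices=[[1,0,0,0],[0,1,0,0]],
                  transition_covariance=1.0e-4*np.eye(4),
                  observation_covariance=1.0e-1*np.eye(2))

x_now = np.zeros(4)          # crude initial state; the first detections will pull it in
P_now = 1.0e3*np.eye(4)      # large initial uncertainty instead of a training phase

while True:
    ret, img = capture.read()
    if not ret:
        break
    foremat = bgs.apply(img)
    ret, thresh = cv2.threshold(foremat, 127, 255, 0)
    contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    if len(contours) > 0:
        observation = np.mean(contours[0], axis=0)[0]   # centroid of the first contour
    else:
        observation = None                              # no detection: predict only
    x_now, P_now = kf.filter_update(filtered_state_mean=x_now,
                                    filtered_state_covariance=P_now,
                                    observation=observation)
    cv2.circle(img, (int(x_now[0]), int(x_now[1])), 5, (0, 255, 0), -1)
    cv2.imshow("Video", img)
    if cv2.waitKey(30) & 0xFF == 27:                    # press Esc to quit
        break

capture.release()
cv2.destroyAllWindows()

For tracking one person in a group, the filtering part stays the same but the measurement side gets harder: you need a detector that finds people rather than any moving blob (for example cv2.HOGDescriptor with the default people detector), one Kalman filter per person you want to follow, and a data-association step (e.g. matching each detection to the filter whose predicted position is nearest) so that the right detection updates the right filter.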
Upvotes: 2