I would like to track an object using optical flow.
Since the screen fills up with past trajectories, I would like to clear the drawn trajectory every 10 seconds and then keep drawing new ones.
I would appreciate it if you could show me how to do this.

Corresponding source code
import cv2
import numpy as np

# Open the input video.
cap = cv2.VideoCapture("C:/github/sample/python/opencv/video/input2.mp4")

# Shi-Tomasi corner-detection parameters (corners are used as feature points).
ft_params = dict(maxCorners=100,    # maximum number of feature points
                 qualityLevel=0.3,  # quality threshold; higher -> fewer, stronger corners
                 minDistance=7,     # minimum distance between feature points
                 blockSize=7)       # neighbourhood size used to score each corner

# Lucas-Kanade optical-flow parameters (for tracking the points between frames).
lk_params = dict(winSize=(15, 15),  # search window size at each pyramid level
                 maxLevel=2,        # pyramid levels (2 -> search down to 1/4 resolution)
                 criteria=(cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03))

# Read the first frame and convert it to grayscale.
ret, frame = cap.read()
gray1 = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

# Detect the initial feature points with the Shi-Tomasi method.
ft1 = cv2.goodFeaturesToTrack(gray1, mask=None, **ft_params)

# Image on which the past trajectories are drawn (same size as the frames).
mask = np.zeros_like(frame)

# --- periodic trajectory reset -----------------------------------------
# Clear the trajectory mask every RESET_INTERVAL_SEC seconds of video.
# The interval is converted to a frame count using the video's FPS.
RESET_INTERVAL_SEC = 10
fps = cap.get(cv2.CAP_PROP_FPS) or 30.0  # fall back to 30 fps if the file reports 0
reset_every = max(1, int(fps * RESET_INTERVAL_SEC))
frame_count = 0

# Repeat until the end of the video.
while cap.isOpened():
    # Read the next frame; stop cleanly at end-of-video or on a read error.
    ret, frame = cap.read()
    if not ret:
        break
    gray2 = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    # Lucas-Kanade optical flow of the feature points between the two frames.
    ft2, status, err = cv2.calcOpticalFlowPyrLK(
        gray1, gray2, ft1, None, **lk_params)

    # Keep only the points whose flow was successfully found (status == 1).
    good1 = ft1[status == 1]  # positions in the previous frame
    good2 = ft2[status == 1]  # positions in the current frame

    # Draw trajectories on the mask and the current positions on the frame.
    for pt2, pt1 in zip(good2, good1):
        x1, y1 = pt1.ravel()  # previous-frame coordinates
        x2, y2 = pt2.ravel()  # current-frame coordinates
        # cv2.line/cv2.circle require integer pixel coordinates.
        mask = cv2.line(mask, (int(x2), int(y2)), (int(x1), int(y1)), [0, 0, 200], 2)
        frame = cv2.circle(frame, (int(x2), int(y2)), 5, [0, 0, 200], -1)

    # Composite the current frame with the trajectory mask and display it.
    img = cv2.add(frame, mask)
    cv2.imshow('mask', img)

    # Prepare the next iteration: current frame/points become the previous ones.
    gray1 = gray2.copy()
    ft1 = good2.reshape(-1, 1, 2)

    # Every 10 seconds (or if every point was lost): wipe the trajectories
    # and re-detect feature points so tracking continues with fresh corners.
    frame_count += 1
    if frame_count % reset_every == 0 or len(ft1) == 0:
        mask = np.zeros_like(frame)
        new_pts = cv2.goodFeaturesToTrack(gray1, mask=None, **ft_params)
        if new_pts is not None:  # keep the old points if nothing was found
            ft1 = new_pts

    # Quit early when the 'q' key is pressed.
    if cv2.waitKey(30) & 0xFF == ord('q'):
        break

# Clean up the window and release the video.
cv2.destroyAllWindows()
cap.release()
  • Answer # 1

    If you are displaying the past trajectory by drawing lines on the mask,
    it would be best to re-initialize the contents of the mask (back to the state where no lines are drawn) every 10 seconds.