IyBUaGlzIHNjcmlwdCBhZGRzIGEgdHJhaWxpbmcgbGlnaHQgZWZmZWN0ICgmcXVvdDvEkSZlZ3JhdmU7biB2JmVncmF2ZTtvIMSRJmlhY3V0ZTt0JnF1b3Q7KSB0byBhIHRyYWNrZWQgb2JqZWN0IGluIGEgdmlkZW8gdXNpbmcgT3BlbkNWIGFuZCBNb3ZpZVB5LgoKZnJvbSBtb3ZpZXB5LmVkaXRvciBpbXBvcnQgVmlkZW9GaWxlQ2xpcCwgQ29tcG9zaXRlVmlkZW9DbGlwCmltcG9ydCBjdjIKaW1wb3J0IG51bXB5IGFzIG5wCmZyb20gbW92aWVweS52aWRlby5WaWRlb0NsaXAgaW1wb3J0IFZpZGVvQ2xpcAoKIyBMb2FkIHZpZGVvCnZpZGVvX3BhdGggPSAmcXVvdDsvbW50L2RhdGEvc25hcHRpay52bl9iMjk2MS5tcDQmcXVvdDsKY2xpcCA9IFZpZGVvRmlsZUNsaXAodmlkZW9fcGF0aCkKCiMgVXNlIGEgc2ltcGxlIHRyYWNrZXIgYmFzZWQgb24gYmFja2dyb3VuZCBzdWJ0cmFjdGlvbiAoZm9yIGRlbW9uc3RyYXRpb24pCmNhcCA9IGN2Mi5WaWRlb0NhcHR1cmUodmlkZW9fcGF0aCkKZnBzID0gY2FwLmdldChjdjIuQ0FQX1BST1BfRlBTKQp3aWR0aCA9IGludChjYXAuZ2V0KGN2Mi5DQVBfUFJPUF9GUkFNRV9XSURUSCkpCmhlaWdodCA9IGludChjYXAuZ2V0KGN2Mi5DQVBfUFJPUF9GUkFNRV9IRUlHSFQpKQoKIyBCYWNrZ3JvdW5kIHN1YnRyYWN0b3IKZmdiZyA9IGN2Mi5jcmVhdGVCYWNrZ3JvdW5kU3VidHJhY3Rvck1PRzIoKQpwb3NpdGlvbnMgPSBbXQoKIyBUcmFjayBvYmplY3QgcG9zaXRpb25zCndoaWxlIFRydWU6CiAgICByZXQsIGZyYW1lID0gY2FwLnJlYWQoKQogICAgaWYgbm90IHJldDoKICAgICAgICBicmVhawogICAgZmdtYXNrID0gZmdiZy5hcHBseShmcmFtZSkKICAgIGNvbnRvdXJzLCBfID0gY3YyLmZpbmRDb250b3VycyhmZ21hc2ssIGN2Mi5SRVRSX0VYVEVSTkFMLCBjdjIuQ0hBSU5fQVBQUk9YX1NJTVBMRSkKICAgIGlmIGNvbnRvdXJzOgogICAgICAgIGxhcmdlc3QgPSBtYXgoY29udG91cnMsIGtleT1jdjIuY29udG91ckFyZWEpCiAgICAgICAgTSA9IGN2Mi5tb21lbnRzKGxhcmdlc3QpCiAgICAgICAgaWYgTVsmcXVvdDttMDAmcXVvdDtdICE9IDA6CiAgICAgICAgICAgIGN4ID0gaW50KE1bJnF1b3Q7bTEwJnF1b3Q7XSAvIE1bJnF1b3Q7bTAwJnF1b3Q7XSkKICAgICAgICAgICAgY3kgPSBpbnQoTVsmcXVvdDttMDEmcXVvdDtdIC8gTVsmcXVvdDttMDAmcXVvdDtdKQogICAgICAgICAgICBwb3NpdGlvbnMuYXBwZW5kKChjeCwgY3kpKQogICAgICAgIGVsc2U6CiAgICAgICAgICAgIHBvc2l0aW9ucy5hcHBlbmQoKDAsIDApKQogICAgZWxzZToKICAgICAgICBwb3NpdGlvbnMuYXBwZW5kKCgwLCAwKSkKCmNhcC5yZWxlYXNlKCkKCiMgR2VuZXJhdGUgZWZmZWN0IHBlciBmcmFtZQpkZWYgbWFrZV9lZmZlY3RfZnJhbWUodCk6CiAgICBmcmFtZSA9IG5wLnplcm9zKChoZWlnaHQsIHdpZHRoLCA0KSwgZHR5cGU9bnAudWludDgpCiAgICBpbmRleCA9IGludCh0ICogZnBzKQogICAgdGFpbF9sZW5ndGggPSAxMAogICAg
Zm9yIGkgaW4gcmFuZ2UodGFpbF9sZW5ndGgpOgogICAgICAgIGlkeCA9IGluZGV4IC0gaQogICAgICAgIGlmIDAgJmx0Oz0gaWR4ICZsdDsgbGVuKHBvc2l0aW9ucyk6CiAgICAgICAgICAgIHgsIHkgPSBwb3NpdGlvbnNbaWR4XQogICAgICAgICAgICBhbHBoYSA9IGludCgyMDAgKiAoMSAtIGkgLyB0YWlsX2xlbmd0aCkpCiAgICAgICAgICAgIGN2Mi5jaXJjbGUoZnJhbWUsICh4LCB5KSwgMjAsICgyNTUsIDI1NSwgMCwgYWxwaGEpLCAtMSkKICAgIHJldHVybiBmcmFtZQoKIyBDcmVhdGUgYW5kIG92ZXJsYXkgZWZmZWN0IGNsaXAKZWZmZWN0ID0gVmlkZW9DbGlwKG1ha2VfZWZmZWN0X2ZyYW1lLCBkdXJhdGlvbj1jbGlwLmR1cmF0aW9uKS5zZXRfZHVyYXRpb24oY2xpcC5kdXJhdGlvbikKZmluYWwgPSBDb21wb3NpdGVWaWRlb0NsaXAoW2NsaXAsIGVmZmVjdF0pCgojIE91dHB1dCB2aWRlbwpmaW5hbC53cml0ZV92aWRlb2ZpbGUoJnF1b3Q7L21udC9kYXRhL291dHB1dF93aXRoX3RhaWxfbGlnaHQubXA0JnF1b3Q7LCBjb2RlYz0mcXVvdDtsaWJ4MjY0JnF1b3Q7LCBhdWRpb19jb2RlYz0mcXVvdDthYWMmcXVvdDspCg==
# This script adds a trailing light effect ("đèn vèo đít") to a tracked object in a video using OpenCV and MoviePy.
from moviepy.editor import VideoFileClip, CompositeVideoClip
import cv2
import numpy as np
from moviepy.video.VideoClip import VideoClip
# Load video
# NOTE: the file is opened twice on purpose — MoviePy (clip) for compositing
# and audio, OpenCV (cap) for frame-by-frame analysis below.
video_path = "/mnt/data/snaptik.vn_b2961.mp4"
clip = VideoFileClip(video_path)
# Use a simple tracker based on background subtraction (for demonstration)
cap = cv2.VideoCapture(video_path)
# Frame rate and dimensions drive the overlay frames generated later.
fps = cap.get(cv2.CAP_PROP_FPS)
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
# Background subtractor
fgbg = cv2.createBackgroundSubtractorMOG2()
# positions[i] will hold the tracked (x, y) centroid for frame i.
positions = []
# Track object positions: one (x, y) centroid per frame, largest moving blob.
last_pos = (0, 0)  # fallback until the first detection
while True:
    ret, frame = cap.read()
    if not ret:
        break
    fgmask = fgbg.apply(frame)
    contours, _ = cv2.findContours(fgmask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    if contours:
        largest = max(contours, key=cv2.contourArea)
        M = cv2.moments(largest)
        if M["m00"] != 0:  # zero area would divide by zero
            last_pos = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))
    # Carry the last known centroid forward on detection gaps instead of
    # appending (0, 0), which made the light trail jump to the top-left
    # corner whenever the tracker lost the object for a frame.
    positions.append(last_pos)

cap.release()
# Generate effect per frame
def make_effect_frame(t, tail_length=10, radius=20):
    """Render the RGBA light-trail overlay for time ``t`` (seconds).

    Draws up to ``tail_length`` filled circles of radius ``radius`` along
    the recently tracked positions, fading out toward the tail. Returns a
    ``(height, width, 4)`` uint8 array (RGB + alpha). Relies on the
    module-level ``positions``, ``fps``, ``width`` and ``height`` computed
    during the tracking pass.
    """
    frame = np.zeros((height, width, 4), dtype=np.uint8)
    index = int(t * fps)
    # Draw oldest-to-newest so the brightest (most recent) circle ends up
    # on top where overlapping segments of the tail would otherwise let
    # dimmer, older circles overdraw the head.
    for i in range(tail_length - 1, -1, -1):
        idx = index - i
        if 0 <= idx < len(positions):
            x, y = positions[idx]
            alpha = int(200 * (1 - i / tail_length))
            cv2.circle(frame, (x, y), radius, (255, 255, 0, alpha), -1)
    return frame
# Create and overlay effect clip.
# MoviePy frame functions must return RGB (H, W, 3) arrays; the original
# passed the RGBA frame straight in, so the alpha channel was invalid for
# compositing. Split the overlay into an RGB clip plus a proper mask clip
# (mask frames are floats in [0, 1]) built from the alpha channel.
effect_rgb = VideoClip(lambda t: make_effect_frame(t)[:, :, :3], duration=clip.duration)
effect_mask = VideoClip(lambda t: make_effect_frame(t)[:, :, 3] / 255.0, ismask=True, duration=clip.duration)
effect = effect_rgb.set_mask(effect_mask)
final = CompositeVideoClip([clip, effect])

# Output video (H.264 video, AAC audio — plays in standard players)
final.write_videofile("/mnt/data/output_with_tail_light.mp4", codec="libx264", audio_codec="aac")