Drone red-object tracking over webcam in a Gazebo simulation

Using the dronekit library and OpenCV with a Gazebo simulation, I want the drone to follow a red object that I show to my webcam. It does follow now, but whenever it sees red, the video keeps freezing and the tracking can't keep up.

Where am I going wrong, and what should I do?

I would be glad if you could help.

import argparse
import time

import cv2
import numpy as np
from dronekit import connect, VehicleMode, LocationGlobalRelative
from pymavlink import mavutil



# Connect to the quadcopter (SITL by default)
parser = argparse.ArgumentParser()
parser.add_argument('--connect', default='127.0.0.1:14550')
args = parser.parse_args()
vehicle = connect(args.connect, baud=921600, wait_ready=True)

vehicle.airspeed = 3
vehicle.groundspeed = 3

# Last detected centroid of the red blob, in pixel coordinates
ay = 0
ax = 0

class Velocity():
    def set_velocity_body(self, vehicle, vx, vy, vz):
        # Send a single body-frame velocity command (m/s)
        msg = vehicle.message_factory.set_position_target_local_ned_encode(
            0,
            0, 0,
            mavutil.mavlink.MAV_FRAME_BODY_NED,
            0b0000111111000111,  # -- BITMASK -> consider only the velocities
            0, 0, 0,     # -- POSITION
            vx, vy, vz,  # -- VELOCITY
            0, 0, 0,     # -- ACCELERATIONS
            0, 0)

        vehicle.send_mavlink(msg)
        vehicle.flush()
        time.sleep(0.5)  # NOTE: this sleep blocks the caller for half a second


def arm_and_takeoff(aTargetAltitude):
    # Wait until the autopilot is ready to arm
    while not vehicle.is_armable:
        time.sleep(1)

    vehicle.mode = VehicleMode("GUIDED")
    vehicle.armed = True

    # Wait for arming to complete
    while not vehicle.armed:
        time.sleep(1)

    vehicle.simple_takeoff(aTargetAltitude)

    # Block until the target altitude is (almost) reached
    while True:
        print(" Altitude: ", vehicle.location.global_relative_frame.alt)
        if vehicle.location.global_relative_frame.alt >= aTargetAltitude * 0.95:
            break
        time.sleep(1)


def go(p):
    # Fly to the given waypoint
    vehicle.simple_goto(p, 3)


def center_shape(x, y, w, h, frame, t, color, a):
    # Label the detection and mark the centroid of contour 'a'
    cv2.putText(frame, color + str(t), (int(x), int(y + 20)), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 255, 255))
    M = cv2.moments(a)
    ax = int(M["m10"] / M["m00"])
    ay = int(M["m01"] / M["m00"])
    cv2.circle(frame, (ax, ay), 3, (255, 255, 255), -1)
    cv2.putText(frame, "center", (ax - 20, ay - 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 1)
    return ax, ay


def moving(ax, ay):
    # Steer toward the blob by comparing its centroid with the frame centre (240, 240).
    # Each set_velocity_body() call sleeps 0.5 s, so this can block the video loop
    # for up to a second per frame.
    v2 = Velocity()
    if ax < 240:
        v2.set_velocity_body(vehicle, 1, 0, 0)
        print("Forward")
    elif ax > 240:
        v2.set_velocity_body(vehicle, -1, 0, 0)
        print("Back")
    if ay > 240:
        v2.set_velocity_body(vehicle, 0, -1, 0)
        print("Left")
    elif ay < 240:
        v2.set_velocity_body(vehicle, 0, 1, 0)
        print("Right")


def opencv(ax, ay):
    capture = cv2.VideoCapture(0)
    center = False
    # HSV threshold for "red" (note: OpenCV hue values only go up to 179)
    lower_red = np.array([130, 150, 150])
    upper_red = np.array([200, 255, 255])
    time.sleep(0.1)
    while True:
        t = 0
        ret, shot = capture.read()
        if not ret:
            break
        cv2.circle(shot, (240, 240), 3, (255, 255, 255), -1)  # frame centre marker
        blur = cv2.GaussianBlur(shot, (11, 11), 0)
        hsv = cv2.cvtColor(blur, cv2.COLOR_BGR2HSV)
        mask = cv2.inRange(hsv, lower_red, upper_red)
        kernel_open = np.ones((20, 20))
        mask = cv2.dilate(mask, kernel_open, iterations=5)
        contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        for a in contours:
            area = cv2.contourArea(a)
            peri = 0.01 * cv2.arcLength(a, True)
            approx = cv2.approxPolyDP(a, peri, True)
            if 10 < len(approx) <= 20 and area > 1000:
                x, y, w, h = cv2.boundingRect(a)
                color = "Fire"
                ax, ay = center_shape(x, y, w, h, shot, t, color, a)
                moving(ax, ay)  # blocks here while velocity commands are sent

        cv2.imshow('cam', shot)
        cv2.waitKey(1)
        # Stop once the blob is roughly centred in the frame
        if 180 <= ax <= 300 and 180 <= ay <= 300:
            center = True
        if center:
            lat = vehicle.location.global_relative_frame.lat
            lon = vehicle.location.global_relative_frame.lon  # current location
            currentLoc = LocationGlobalRelative(lat, lon, 2.5)
            vehicle.simple_goto(currentLoc)
            time.sleep(7)
            capture.release()
            cv2.destroyAllWindows()
            break


# Take off, fly to a fixed waypoint, then start the camera tracking loop
arm_and_takeoff(5)

time.sleep(2)

p = LocationGlobalRelative(-35.36309829, 149.16520472, 5)
go(p)
time.sleep(5)

opencv(ax, ay)


vehicle.close()
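
For what it's worth, the freezing is most likely not an OpenCV problem but the blocking calls inside the capture loop: set_velocity_body() sleeps 0.5 s after every command (and moving() can send two commands per frame), and the time.sleep(7) after simple_goto() stops the loop entirely, so no frames are read or displayed while the drone is being steered. Below is a minimal sketch of one way around that, assuming the same SITL/webcam setup: the sleep moves into a small background thread that re-sends the latest body-frame velocity, so the vision loop only updates a value and keeps grabbing frames. VelocitySender and its update() method are illustrative names, not part of dronekit.

import threading
import time

from pymavlink import mavutil


class VelocitySender:
    """Re-send the latest body-frame velocity at ~2 Hz from a background thread."""

    def __init__(self, vehicle):
        self.vehicle = vehicle
        self.vx = self.vy = self.vz = 0.0
        self._lock = threading.Lock()
        self._running = True
        threading.Thread(target=self._loop, daemon=True).start()

    def update(self, vx, vy, vz):
        # Called from the vision loop; returns immediately instead of sleeping
        with self._lock:
            self.vx, self.vy, self.vz = vx, vy, vz

    def stop(self):
        self._running = False

    def _loop(self):
        while self._running:
            with self._lock:
                vx, vy, vz = self.vx, self.vy, self.vz
            msg = self.vehicle.message_factory.set_position_target_local_ned_encode(
                0, 0, 0,
                mavutil.mavlink.MAV_FRAME_BODY_NED,
                0b0000111111000111,  # velocity-only bitmask, same as in the question
                0, 0, 0,
                vx, vy, vz,
                0, 0, 0,
                0, 0)
            self.vehicle.send_mavlink(msg)
            time.sleep(0.5)  # the sleep now lives here, not in the video loop

With that, moving() would reduce to a call like sender.update(1, 0, 0) that returns immediately, and the 7-second sleep after simple_goto() could be replaced by checking the distance to the target inside the loop, so cv2.imshow() keeps receiving fresh frames.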

