# Code to control webcam based on ultrasonic sensor detection for gesture recognition

This document contains Python code that activates a webcam for hand-gesture recognition when an ultrasonic sensor detects an object within a set distance. Gesture detection is done with MediaPipe and OpenCV, while the ultrasonic sensor is read through an Arduino using PyFirmata. The code also counts raised fingers and toggles a light state (printed to the console) based on specific gestures.


# Code 1 Imports (Gesture Detection)


import cv2
import mediapipe as mp
import numpy as np

# Code 2 Imports (Ultrasonic Sensor with Arduino)


import pyfirmata
from pyfirmata import Arduino, util  # Use PyFirmata to communicate with the Arduino
import time

# Gesture Detection Setup


mp_hands = mp.solutions.hands
hands = mp_hands.Hands(min_detection_confidence=0.7, min_tracking_confidence=0.7)
mp_drawing = mp.solutions.drawing_utils

# Ultrasonic Sensor and Arduino Setup


TRIG_PIN = 2
ECHO_PIN = 3
board = Arduino('/dev/ttyUSB0')  # Adjust for your Arduino port
board.digital[TRIG_PIN].mode = pyfirmata.OUTPUT
board.digital[ECHO_PIN].mode = pyfirmata.INPUT

# Start an iterator thread so that pin.read() returns live values instead of None,
# and enable reporting on the echo pin
it = util.Iterator(board)
it.start()
board.digital[ECHO_PIN].enable_reporting()

# Function to get distance from the ultrasonic sensor


def get_distance():
    # Send a short (~10 microsecond) trigger pulse
    board.digital[TRIG_PIN].write(1)
    time.sleep(0.00001)
    board.digital[TRIG_PIN].write(0)

    # Measure how long the echo pin stays high
    start_time = time.time()
    end_time = start_time  # fallback so the function still returns if the echo is missed
    while board.digital[ECHO_PIN].read() == 0:
        start_time = time.time()
    while board.digital[ECHO_PIN].read() == 1:
        end_time = time.time()

    # Calculate distance: sound travels at ~34300 cm/s, halved for the round trip
    duration = end_time - start_time
    distance = (duration * 34300) / 2
    return distance
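
# Optional sketch (not part of the original code): Firmata polls pin state over the
# serial link, so read() may briefly return None and a short echo pulse can be missed,
# which would leave the loops above spinning. A timeout-guarded variant is shown below;
# the function name and timeout value are assumptions.
def get_distance_with_timeout(timeout=0.05):
    # Trigger pulse, same as get_distance()
    board.digital[TRIG_PIN].write(1)
    time.sleep(0.00001)
    board.digital[TRIG_PIN].write(0)

    deadline = time.time() + timeout
    # Wait for the echo pin to go high, giving up once the deadline passes
    while board.digital[ECHO_PIN].read() != 1:
        if time.time() > deadline:
            return None
    start_time = time.time()
    end_time = start_time
    # Time how long the echo pin stays high
    while board.digital[ECHO_PIN].read() == 1:
        if time.time() > deadline:
            break
        end_time = time.time()
    return ((end_time - start_time) * 34300) / 2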

# Gesture Counting Function


def count_fingers(hand_landmarks):
    finger_tips = [8, 12]  # Index and middle finger tip landmarks
    count = 0
    for tip in finger_tips:
        # A finger counts as raised when its tip sits above its PIP joint
        # (smaller y value in image coordinates)
        if hand_landmarks.landmark[tip].y < hand_landmarks.landmark[tip - 2].y:
            count += 1
    return count
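
# Optional sketch (not part of the original code): extending the count to all five
# fingers. The thumb is compared along x rather than y; the comparison below assumes
# a right hand facing the camera, so treat it as an illustration, not a robust rule.
def count_all_fingers(hand_landmarks):
    count = 0
    # Index, middle, ring and pinky: tip above the PIP joint means raised
    for tip in [8, 12, 16, 20]:
        if hand_landmarks.landmark[tip].y < hand_landmarks.landmark[tip - 2].y:
            count += 1
    # Thumb: compare tip (landmark 4) against the IP joint (landmark 3) along x
    if hand_landmarks.landmark[4].x < hand_landmarks.landmark[3].x:
        count += 1
    return count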

# Initial Variables
cap = None
light_on = False
webcam_active = False
distance_threshold = 30 # Distance threshold to trigger the camera (in cm)

# Main Loop
while True:
    # Ultrasonic Sensor Reading
    distance = get_distance()
    print("Distance:", distance, "cm")

    # Check if within threshold to toggle webcam
    if distance < distance_threshold:
        if not webcam_active:
            print("Activating webcam...")
            cap = cv2.VideoCapture(0)  # Activate the webcam
            webcam_active = True
        else:
            print("Deactivating webcam...")
            cap.release()  # Deactivate the webcam
            cv2.destroyAllWindows()
            webcam_active = False
        time.sleep(1)  # Add delay to prevent quick toggles

    # If webcam is active, perform gesture detection
    if webcam_active and cap is not None and cap.isOpened():
        success, frame = cap.read()
        if not success:
            break

        # Convert image color to RGB for MediaPipe
        image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        results = hands.process(image)

        # Convert back to BGR for OpenCV
        image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)

        if results.multi_hand_landmarks:
            for hand_landmarks in results.multi_hand_landmarks:
                mp_drawing.draw_landmarks(image, hand_landmarks, mp_hands.HAND_CONNECTIONS)

                # Count the number of fingers up
                fingers_up = count_fingers(hand_landmarks)

                # Toggle light based on fingers up
                if fingers_up == 2 and not light_on:
                    light_on = True
                    print("Light ON")
                elif fingers_up == 1 and light_on:
                    light_on = False
                    print("Light OFF")

        # Display light status on the image
        status_text = "Light ON" if light_on else "Light OFF"
        color = (0, 255, 0) if light_on else (0, 0, 255)
        cv2.putText(image, status_text, (10, 50), cv2.FONT_HERSHEY_SIMPLEX, 1.5, color, 3)

        # Display the resulting frame
        cv2.imshow('Hand Gesture Control', image)

        # Exit on pressing 'q'
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break

# Release resources on exit
if cap:
    cap.release()
cv2.destroyAllWindows()
hands.close()
board.exit()
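
A note on assumptions: PyFirmata requires the Arduino to be running the StandardFirmata sketch, and the serial port name depends on the system (for example 'COM3' on Windows rather than '/dev/ttyUSB0'). The pin numbers above assume an HC-SR04-style ultrasonic sensor with TRIG wired to digital pin 2 and ECHO to digital pin 3.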
