Virtual Telepresence Robot Using Raspberry Pi

A virtual telepresence robot is a remote-controlled, wheeled device with a camera that captures its surroundings and gives the operator the feeling of being present at that location. The captured video is displayed on the user's virtual reality (VR) headset, and the camera moves in the direction of the user's head movements. The robot itself can be driven in any direction with a controller.

Working and Explanation

In this Virtual Telepresence and Gesture Controlled Robot, live video captured by the Raspberry Pi camera is viewed on an Android phone mounted inside a virtual reality (VR) box, letting the user experience the remote environment through the VR box. Dual-screen mode is enabled on the smartphone for this purpose.
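Before mounting the phone in the VR box, you can confirm that the stream is reachable with a small test like the one below. This is only a sketch, not part of the project code; it assumes the Pi serves MJPEG on port 8000 under /stream.mjpg, as in the camera code later in this article, and you should replace the IP address with your Pi's own.

# Quick check (sketch): confirm the MJPEG stream is reachable. Assumes the
# camera script later in this article is running; replace the IP with your Pi's.
import urllib.request

STREAM_URL = "http://192.168.1.105:8000/stream.mjpg"

with urllib.request.urlopen(STREAM_URL, timeout=5) as stream:
    first_bytes = stream.read(32)   # start of the multipart MJPEG stream
    print("Stream reachable, first bytes:", first_bytes)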

When the user turns their head, the smartphone reads its accelerometer and magnetometer data. This data is sent over Wi-Fi, through the router, to the Raspberry Pi board, which in turn uses these values as input for the servo motors.

Two servo motors move the camera: one for pan (horizontal) motion and the other for tilt (vertical) motion. So when you turn your head, and with it the VR box, to the right, the Raspberry Pi camera also turns to the right; the camera keeps following your head as you continue to move it.
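For a rough idea of how a head angle turns into a servo command on the Pi, the small sketch below uses the same angle/18 + 3 duty-cycle relation that appears in the servo code later in this article. It is an illustration only, not part of the project code.

# Illustration only: convert an angle (0-180 degrees) into a duty cycle for a
# 50 Hz hobby servo, using the angle/18 + 3 relation from the servo script
# later in this article.
def angle_to_duty_cycle(angle):
    assert 0 <= angle <= 180
    return angle / 18.0 + 3.0   # about 3% at 0 degrees, 13% at 180 degrees

for head_angle in (0, 90, 170):
    print(head_angle, "deg ->", round(angle_to_duty_cycle(head_angle), 2), "% duty cycle")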

Components Used:

  • Raspberry Pi 3 Model B+
  • Arduino Nano
  • Servo Motor
  • RF Serial 434 MHz Module HC-11
  • Joystick
  • L298N DC Motor Driver
  • PWM (for speed control)
  • DC Motor
Block Diagram

Robot movements

Joystick Controller

On the transmitter side, we have designed our own controller: a thumb joystick connected to an Arduino Nano. Using an HC-11 434 MHz RF module, we send the control signals to the robot's receiver.

(Photo of the joystick controller.)

Robot

On this side, we receive the signals from the joystick controller through another HC-11 434 MHz RF module and pass the values to an Arduino; the Arduino then sends these signals to the motor driver, which drives the DC motors.

(Photo of the receiver side of the robot.)

Results

(Photos of the completed virtual telepresence robot.)

Coding (VR Robotic Telepresence Platform Using Raspberry Pi)

Transmitter Code (Joystick Controller)

#include <SoftwareSerial.h>

SoftwareSerial rf(3, 2);   // RX, TX pins for the HC-11 module

int xpin = A1;             // joystick X axis
int ypin = A2;             // joystick Y axis
int xvar, yvar;

void setup()
{
  Serial.begin(9600);      // debug output on the Serial Monitor
  rf.begin(9600);          // link to the HC-11
  pinMode(xpin, INPUT);
  pinMode(ypin, INPUT);
}

void loop()
{
  xvar = analogRead(xpin);
  Serial.print("X-axis=");
  Serial.print(xvar);
  Serial.println(" ");
  yvar = analogRead(ypin);
  Serial.print("Y-axis=");
  Serial.print(yvar);
  Serial.println(" ");
  //delay(1000);

  if ((xvar >= 510 && xvar <= 517) && (yvar >= 500 && yvar <= 506))
  {
    Serial.println("Stop");
    rf.write('S');
    //delay(200);
  }
  else if ((xvar >= 1000 && xvar <= 1024) && (yvar >= 500 && yvar <= 507))
  {
    Serial.println("Up");
    rf.write('U');
    //delay(200);
  }
  else if ((xvar >= 0 && xvar <= 10) && (yvar >= 500 && yvar <= 507))
  {
    Serial.println("Down");
    rf.write('D');
    //delay(200);
  }
  else if ((xvar >= 400 && xvar <= 840) && (yvar >= 1000 && yvar <= 1024))
  {
    Serial.println("Right");
    rf.write('R');
    //delay(200);
  }
  else if ((xvar >= 500 && xvar <= 517) && (yvar >= 0 && yvar <= 20))
  {
    Serial.println("Left");
    rf.write('L');
    //delay(200);
  }
}
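The X and Y threshold bands in the code above depend on the resting values of your particular joystick. To recalibrate them, watch the values the Nano prints on the Serial Monitor, or log them from a PC with a small helper like the sketch below (our assumption: it uses the pyserial package and a typical Linux port name, so adjust both to your setup).

# Hypothetical calibration helper: read the X/Y values printed by the
# transmitter sketch over USB serial. Assumes pyserial is installed and the
# Nano shows up as /dev/ttyUSB0 (use COM3 or similar on Windows).
import serial

with serial.Serial("/dev/ttyUSB0", 9600, timeout=1) as port:
    for _ in range(50):
        line = port.readline().decode(errors="ignore").strip()
        if line:
            print(line)   # e.g. "X-axis=512" / "Y-axis=503" at rest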

Receiver Code

int in1 = 2;   // motor driver inputs
int in2 = 4;
int in3 = 6;
int in4 = 7;
int ena = 3;   // PWM enable, left motor
int enb = 5;   // PWM enable, right motor
char ch;

void setup()
{
  Serial.begin(9600);      // serial link from the HC-11 module
  pinMode(in1, OUTPUT);
  pinMode(in2, OUTPUT);
  pinMode(in3, OUTPUT);
  pinMode(in4, OUTPUT);
  pinMode(ena, OUTPUT);
  pinMode(enb, OUTPUT);
  analogWrite(ena, 180);   // default speed
  analogWrite(enb, 180);
}

void loop()
{
  if (Serial.available() > 0)
  {
    ch = Serial.read();
    Serial.println(ch);

    if (ch == 'U')         // forward
    {
      digitalWrite(in1, HIGH);
      digitalWrite(in2, LOW);
      digitalWrite(in3, LOW);
      digitalWrite(in4, HIGH);
      Serial.print("move forward");
      analogWrite(ena, 180);
      analogWrite(enb, 180);
    }
    else if (ch == 'D')    // backward
    {
      digitalWrite(in1, LOW);
      digitalWrite(in2, HIGH);
      digitalWrite(in3, HIGH);
      digitalWrite(in4, LOW);
      Serial.print("move backward");
      analogWrite(ena, 180);
      analogWrite(enb, 180);
    }
    else if (ch == 'S')    // stop
    {
      digitalWrite(in1, LOW);
      digitalWrite(in2, LOW);
      digitalWrite(in3, LOW);
      digitalWrite(in4, LOW);
    }
    else if (ch == 'R')    // right turn at higher speed
    {
      digitalWrite(in1, HIGH);
      digitalWrite(in2, LOW);
      digitalWrite(in3, HIGH);
      digitalWrite(in4, LOW);
      analogWrite(ena, 240);
      analogWrite(enb, 240);
    }
    else if (ch == 'L')    // left turn at higher speed
    {
      digitalWrite(in1, LOW);
      digitalWrite(in2, HIGH);
      digitalWrite(in3, LOW);
      digitalWrite(in4, HIGH);
      analogWrite(ena, 240);
      analogWrite(enb, 240);
    }
  }
}
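Because the receiver simply reads single characters from its serial port, the drive logic can be bench-tested without the joystick by sending the command characters ('U', 'D', 'L', 'R', 'S') from a PC. The sketch below is only an example of such a test; it assumes pyserial and that the receiver Arduino shows up as /dev/ttyUSB0.

# Hypothetical bench test: drive the receiver directly over USB serial by
# sending the same single-character commands the joystick transmitter uses.
import time
import serial

with serial.Serial("/dev/ttyUSB0", 9600, timeout=1) as port:
    time.sleep(2)            # wait for the Arduino to reset after the port opens
    for command in (b"U", b"R", b"S"):
        port.write(command)  # forward, turn right, stop
        time.sleep(1)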

Raspberry Pi Coding

Servo Coding

from time import sleep
import RPi.GPIO as GPIO

GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)

def setServoAngle(servo, angle):
    # Position the servo on the given GPIO pin at 'angle' degrees
    assert 0 <= angle <= 180
    pwm = GPIO.PWM(servo, 50)        # 50 Hz PWM for a standard hobby servo
    pwm.start(8)
    dutyCycle = angle / 18. + 3.     # map 0-180 degrees to roughly 3-13% duty
    pwm.ChangeDutyCycle(dutyCycle)
    sleep(0.3)                       # give the servo time to reach the angle
    pwm.stop()

if __name__ == '__main__':
    import sys
    servo = int(sys.argv[1])         # GPIO pin number (BCM numbering)
    GPIO.setup(servo, GPIO.OUT)
    setServoAngle(servo, int(sys.argv[2]))   # angle in degrees
    GPIO.cleanup()
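The head-movement code in the next section drives the servos by calling this script from the shell as angleServoCtrl.py (that file name comes from the os.system call there), so save it under that name. You can also position a servo directly, for example python angleServoCtrl.py 17 90 to centre the pan servo on GPIO 17.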

Servo Movement Coding

# Pan servo (GPIO 17): right turn at angle = 0 degrees, rest at angle = 90,
# left at angle = 170 degrees. Tilt servo is on GPIO 18.
import os
import socket

import RPi.GPIO as GPIO

GPIO.setmode(GPIO.BCM)

def positionServo(servo, angle):
    # Drive the servo on the given GPIO pin via the angle script above
    os.system("python angleServoCtrl.py " + str(servo) + " " + str(angle))
    print("[INFO] Positioning servo at GPIO {0} to {1} degrees\n".format(servo, angle))

# UDP socket that receives the accelerometer values streamed by the smartphone
UDP_IP = "192.168.1.104"
UDP_PORT = 8000
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
sock.bind((UDP_IP, UDP_PORT))

def get_acc(data):
    # Parse one comma-separated sensor packet and move the pan (GPIO 17)
    # and tilt (GPIO 18) servos according to the head orientation
    im = str(bytes(data))
    a, b, acc_x, acc_y, acc_z = im.split(",")
    acc_x1 = float(acc_x)
    acc_y1 = float(acc_y)
    acc_z1 = float(acc_z[2:7])       # trim the trailing characters of the packet
    acc_x11 = abs(acc_x1)
    acc_y11 = abs(acc_y1)
    acc_z11 = abs(acc_z1)
    print("X-axis: {}\nY-axis: {}\nZ-axis: {}\n".format(acc_x11, acc_y11, acc_z11))

    if 9 < acc_x11 <= 10 and 0 < acc_y11 <= 0.5 and 0 < acc_z11 <= 2:
        positionServo(17, 90)        # head level: pan centred
        positionServo(18, 120)       # tilt at rest
        print("stable")
    elif 5 <= acc_x11 <= 7 and 0 <= acc_y11 <= 0.5 and 4 <= acc_z11 <= 9:
        positionServo(17, 90)
        positionServo(18, 40)        # tilt up
        print("up")
    elif 0 < acc_x11 < 5 and 0 <= acc_y11 <= 0.7 and 7 < acc_z11 <= 10:
        positionServo(17, 90)
        positionServo(18, 170)       # tilt down
        print("down")
    elif 7 < acc_x11 <= 10 and 0 <= acc_y11 <= 1 and 0 < acc_z11 <= 2:
        positionServo(17, 0)         # pan right
        positionServo(18, 120)
        print("right")
    elif 7 < acc_x11 <= 10 and 0 <= acc_y11 <= 1.3 and 1 < acc_z11 <= 2.3:
        positionServo(17, 120)       # pan left
        positionServo(18, 120)
        print("left")
    return [acc_x, acc_y, acc_z]

while True:
    data, addr = sock.recvfrom(1024)
    acc_op = get_acc(data)

Raspberry Pi Camera Coding

# Web streaming example
# Source code from the official PiCamera package
# http://picamera.readthedocs.io/en/latest/recipes2.html#web-streaming
import io
import picamera
import logging
import socketserver
from threading import Condition
from http import server

PAGE = """\
<html>
<head>
<title>Raspberry Pi - Surveillance Camera</title>
</head>
<body>
<center><h1>Raspberry Pi - Surveillance Camera</h1></center>
<center><img src="stream.mjpg" width="640" height="480"></center>
</body>
</html>
"""

class StreamingOutput(object):
    def __init__(self):
        self.frame = None
        self.buffer = io.BytesIO()
        self.condition = Condition()

    def write(self, buf):
        if buf.startswith(b'\xff\xd8'):
            # New frame, copy the existing buffer's content and notify all
            # clients it's available
            self.buffer.truncate()
            with self.condition:
                self.frame = self.buffer.getvalue()
                self.condition.notify_all()
            self.buffer.seek(0)
        return self.buffer.write(buf)

class StreamingHandler(server.BaseHTTPRequestHandler):
    def do_GET(self):
        if self.path == '/':
            self.send_response(301)
            self.send_header('Location', '/index.html')
            self.end_headers()
        elif self.path == '/index.html':
            content = PAGE.encode('utf-8')
            self.send_response(200)
            self.send_header('Content-Type', 'text/html')
            self.send_header('Content-Length', len(content))
            self.end_headers()
            self.wfile.write(content)
        elif self.path == '/stream.mjpg':
            self.send_response(200)
            self.send_header('Age', 0)
            self.send_header('Cache-Control', 'no-cache, private')
            self.send_header('Pragma', 'no-cache')
            self.send_header('Content-Type', 'multipart/x-mixed-replace; boundary=FRAME')
            self.end_headers()
            try:
                while True:
                    with output.condition:
                        output.condition.wait()
                        frame = output.frame
                    self.wfile.write(b'--FRAME\r\n')
                    self.send_header('Content-Type', 'image/jpeg')
                    self.send_header('Content-Length', len(frame))
                    self.end_headers()
                    self.wfile.write(frame)
                    self.wfile.write(b'\r\n')
            except Exception as e:
                logging.warning(
                    'Removed streaming client %s: %s',
                    self.client_address, str(e))
        else:
            self.send_error(404)
            self.end_headers()

class StreamingServer(socketserver.ThreadingMixIn, server.HTTPServer):
    allow_reuse_address = True
    daemon_threads = True

with picamera.PiCamera(resolution='640x480', framerate=24) as camera:
    output = StreamingOutput()
    # Uncomment the next line to change your Pi's Camera rotation (in degrees)
    # camera.rotation = 90
    camera.start_recording(output, format='mjpeg')
    try:
        address = ('192.168.1.105', 8000)
        server = StreamingServer(address, StreamingHandler)
        server.serve_forever()
    finally:
        camera.stop_recording()
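Once this script is running on the Pi, open http://192.168.1.105:8000 (replace with your Pi's address) in the smartphone's browser, enable dual-screen/VR mode as described earlier, and mount the phone in the VR box to view the live stream.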

Conclusion

The VR Robotic Telepresence Platform using Raspberry Pi changes the way you experience distant places. It lets people be in more than one place at a time and helps a person feel connected by providing a physical presence where they cannot be personally present. Driving the robot means you are free to roam around the office, attend meetings, visit work sites, go to class, and do much more from anywhere at any time. Our telepresence robot provides the following features:

  • Live video from a distant location
  • Control of the robot with a joystick
  • Real-time response to the user's head movements
