
ROS turtlebot_follower: making the robot follow us
Notes on learning the ROS turtlebot_follower package.
First, download the source code into the catkin_ws/src directory; address:
The code is explained through the inline comments below (there are a few places I am still not entirely sure about).
follower.cpp
#include <ros/ros.h>
#include <pluginlib/class_list_macros.h>
#include <nodelet/nodelet.h>
#include <geometry_msgs/Twist.h>
#include <sensor_msgs/Image.h>
#include <visualization_msgs/Marker.h>
#include <turtlebot_msgs/SetFollowState.h>
#include "dynamic_reconfigure/server.h"
#include "turtlebot_follower/FollowerConfig.h"
#include <depth_image_proc/depth_traits.h>

namespace turtlebot_follower
{

/**
 * The turtlebot follower nodelet. Subscribes to depth images
 * from the 3D sensor, processes them, and publishes command
 * velocity messages.
 */
class TurtlebotFollower : public nodelet::Nodelet
{
public:
  /**
   * @brief Constructor for the follower.
   */
  TurtlebotFollower() : min_y_(0.1), max_y_(0.5),
                        min_x_(-0.2), max_x_(0.2),
                        max_z_(0.8), goal_z_(0.6),
                        z_scale_(1.0), x_scale_(5.0)
  {
  }

  ~TurtlebotFollower()
  {
    delete config_srv_;
  }

private:
  double min_y_;   /**< The minimum y position of the points in the box. */
  double max_y_;   /**< The maximum y position of the points in the box. */
  double min_x_;   /**< The minimum x position of the points in the box. */
  double max_x_;   /**< The maximum x position of the points in the box. */
  double max_z_;   /**< The maximum z position of the points in the box. Together with the four fields above, this defines the size of the box. */
  double goal_z_;  /**< The distance from the robot at which to hold the centroid. */
  double z_scale_; /**< The scaling factor for translational robot speed. */
  double x_scale_; /**< The scaling factor for rotational robot speed. */
  bool enabled_;   /**< Enable/disable following; it only suppresses motor commands. When false the robot does not move and nothing is published on the /mobile_base/mobile_base_controller/cmd_vel topic. */

  // Service for switching following on and off
  ros::ServiceServer switch_srv_;

  // Dynamic reconfigure server
  dynamic_reconfigure::Server<turtlebot_follower::FollowerConfig>* config_srv_;

  /**
   * @brief OnInit method from the nodelet base class.
   * Reads parameters and sets up publishers, the subscriber and the services.
   */
  virtual void onInit()
  {
    ros::NodeHandle& nh = getNodeHandle();
    ros::NodeHandle& private_nh = getPrivateNodeHandle();

    private_nh.getParam("min_y", min_y_);
    private_nh.getParam("max_y", max_y_);
    private_nh.getParam("min_x", min_x_);
    private_nh.getParam("max_x", max_x_);
    private_nh.getParam("max_z", max_z_);
    private_nh.getParam("goal_z", goal_z_);
    private_nh.getParam("z_scale", z_scale_);
    private_nh.getParam("x_scale", x_scale_);
    private_nh.getParam("enabled", enabled_);

    cmdpub_ = private_nh.advertise<geometry_msgs::Twist>("/mobile_base/mobile_base_controller/cmd_vel", 1);
    markerpub_ = private_nh.advertise<visualization_msgs::Marker>("marker", 1);
    bboxpub_ = private_nh.advertise<visualization_msgs::Marker>("bbox", 1);
    sub_ = nh.subscribe<sensor_msgs::Image>("depth/image_rect", 1, &TurtlebotFollower::imagecb, this);

    switch_srv_ = private_nh.advertiseService("change_state", &TurtlebotFollower::changeModeSrvCb, this);

    config_srv_ = new dynamic_reconfigure::Server<turtlebot_follower::FollowerConfig>(private_nh);
    dynamic_reconfigure::Server<turtlebot_follower::FollowerConfig>::CallbackType f =
        boost::bind(&TurtlebotFollower::reconfigure, this, _1, _2);
    config_srv_->setCallback(f);
  }

  void reconfigure(turtlebot_follower::FollowerConfig &config, uint32_t level)
  {
    min_y_ = config.min_y;
    max_y_ = config.max_y;
    min_x_ = config.min_x;
    max_x_ = config.max_x;
    max_z_ = config.max_z;
    goal_z_ = config.goal_z;
    z_scale_ = config.z_scale;
    x_scale_ = config.x_scale;
  }

  /**
   * @brief Callback for depth images.
   * Finds the centroid of the points inside the box in front of the robot
   * and publishes a velocity command toward it.
   * @param depth_msg The depth image.
   */
  void imagecb(const sensor_msgs::ImageConstPtr& depth_msg)
  {
    // Precompute the sin of the viewing angle for each column.
    uint32_t image_width = depth_msg->width;
    ROS_INFO_THROTTLE(1, "image_width=%d", image_width);
    // Roughly a 60-degree horizontal field of view; dividing by 57.0 converts degrees to radians.
    float x_radians_per_pixel = 60.0/57.0/image_width;
    float sin_pixel_x[image_width];
    for (int x = 0; x < image_width; ++x) {
      sin_pixel_x[x] = sin((x - image_width/2.0) * x_radians_per_pixel);
    }

    // Precompute the sin of the viewing angle for each row.
    uint32_t image_height = depth_msg->height;
    // Roughly a 45-degree vertical field of view, again converted to radians.
    float y_radians_per_pixel = 45.0/57.0/image_height;
    float sin_pixel_y[image_height];
    for (int y = 0; y < image_height; ++y) {
      // Sign is opposite to x so that y points up.
      sin_pixel_y[y] = sin((image_height/2.0 - y) * y_radians_per_pixel);
    }

    // x, y, z of the centroid
    float x = 0.0;
    float y = 0.0;
    float z = 1e6;
    // Number of points observed inside the box
    unsigned int n = 0;

    // Iterate over all pixels and accumulate the points that fall inside the box.
    const float* depth_row = reinterpret_cast<const float*>(&depth_msg->data[0]);
    int row_step = depth_msg->step / sizeof(float);
    for (int v = 0; v < (int)depth_msg->height; ++v, depth_row += row_step)
    {
      for (int u = 0; u < (int)depth_msg->width; ++u)
      {
        float depth = depth_image_proc::DepthTraits<float>::toMeters(depth_row[u]);
        if (!depth_image_proc::DepthTraits<float>::valid(depth) || depth > max_z_) continue;
        float y_val = sin_pixel_y[v] * depth;
        float x_val = sin_pixel_x[u] * depth;
        if (y_val > min_y_ && y_val < max_y_ &&
            x_val > min_x_ && x_val < max_x_)
        {
          x += x_val;
          y += y_val;
          z = std::min(z, depth);
          n++;
        }
      }
    }

    ROS_INFO_THROTTLE(1, "n == %d", n);

    // If there are enough points, compute the centroid and the velocity command;
    // otherwise publish a stop command.
    if (n > 4000)
    {
      x /= n;
      y /= n;
      if (z > max_z_)
      {
        ROS_INFO_THROTTLE(1, "Centroid too far away %f, stopping the robot\n", z);
        if (enabled_)
        {
          cmdpub_.publish(geometry_msgs::TwistPtr(new geometry_msgs::Twist()));
        }
        return;
      }

      ROS_INFO_THROTTLE(1, "Centroid at %f %f %f with %d points", x, y, z, n);
      publishMarker(x, y, z);

      if (enabled_)
      {
        geometry_msgs::TwistPtr cmd(new geometry_msgs::Twist());
        cmd->linear.x = (z - goal_z_) * z_scale_;
        cmd->angular.z = -x * x_scale_;
        cmdpub_.publish(cmd);
      }
    }
    else
    {
      ROS_INFO_THROTTLE(1, "Not enough points(%d) detected, stopping the robot", n);
      publishMarker(x, y, z);

      if (enabled_)
      {
        cmdpub_.publish(geometry_msgs::TwistPtr(new geometry_msgs::Twist()));
      }
    }

    publishBbox();
  }

  bool changeModeSrvCb(turtlebot_msgs::SetFollowState::Request& request,
                       turtlebot_msgs::SetFollowState::Response& response)
  {
    if ((enabled_ == true) && (request.state == request.STOPPED))
    {
      ROS_INFO("Change mode service request: following stopped");
      cmdpub_.publish(geometry_msgs::TwistPtr(new geometry_msgs::Twist()));
      enabled_ = false;
    }
    else if ((enabled_ == false) && (request.state == request.FOLLOW))
    {
      ROS_INFO("Change mode service request: following (re)started");
      enabled_ = true;
    }

    response.result = response.OK;
    return true;
  }

  void publishMarker(double x, double y, double z)
  {
    visualization_msgs::Marker marker;
    marker.header.frame_id = "/camera_rgb_optical_frame";
    marker.header.stamp = ros::Time();
    marker.ns = "my_namespace";
    marker.id = 0;
    marker.type = visualization_msgs::Marker::SPHERE;
    marker.action = visualization_msgs::Marker::ADD;
    marker.pose.position.x = x;
    marker.pose.position.y = y;
    marker.pose.position.z = z;
    marker.pose.orientation.x = 0.0;
    marker.pose.orientation.y = 0.0;
    marker.pose.orientation.z = 0.0;
    marker.pose.orientation.w = 1.0;
    marker.scale.x = 0.1;
    marker.scale.y = 0.1;
    marker.scale.z = 0.1;
    marker.color.a = 1.0;
    marker.color.r = 1.0;
    marker.color.g = 0.0;
    marker.color.b = 0.0;
    markerpub_.publish(marker);
  }

  void publishBbox()
  {
    double x = (min_x_ + max_x_)/2;
    double y = (min_y_ + max_y_)/2;
    double z = (0 + max_z_)/2;

    double scale_x = (max_x_ - x)*2;
    double scale_y = (max_y_ - y)*2;
    double scale_z = (max_z_ - z)*2;

    visualization_msgs::Marker marker;
    marker.header.frame_id = "/camera_rgb_optical_frame";
    marker.header.stamp = ros::Time();
    marker.ns = "my_namespace";
    marker.id = 1;
    marker.type = visualization_msgs::Marker::CUBE;
    marker.action = visualization_msgs::Marker::ADD;
    marker.pose.position.x = x;
    marker.pose.position.y = -y;
    marker.pose.position.z = z;
    marker.pose.orientation.x = 0.0;
    marker.pose.orientation.y = 0.0;
    marker.pose.orientation.z = 0.0;
    marker.pose.orientation.w = 1.0;
    marker.scale.x = scale_x;
    marker.scale.y = scale_y;
    marker.scale.z = scale_z;
    marker.color.a = 0.5;
    marker.color.r = 0.0;
    marker.color.g = 1.0;
    marker.color.b = 0.0;
    bboxpub_.publish(marker);
  }

  ros::Subscriber sub_;
  ros::Publisher cmdpub_;
  ros::Publisher markerpub_;
  ros::Publisher bboxpub_;
};

PLUGINLIB_DECLARE_CLASS(turtlebot_follower, TurtlebotFollower, turtlebot_follower::TurtlebotFollower, nodelet::Nodelet);

}  // namespace turtlebot_follower
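The change_state service advertised in onInit() lets another node pause or resume following without killing the nodelet. Below is a minimal C++ client sketch of my own; the STOPPED/FOLLOW/OK constants come from turtlebot_msgs/SetFollowState, while the fully qualified service name /turtlebot_follower/change_state is an assumption that depends on the namespace the nodelet actually runs in.

#include <ros/ros.h>
#include <turtlebot_msgs/SetFollowState.h>

int main(int argc, char** argv)
{
  ros::init(argc, argv, "follower_switch_client");
  ros::NodeHandle nh;

  // Service name is assumed; adjust it to match the follower's private namespace in your launch file.
  ros::ServiceClient client =
      nh.serviceClient<turtlebot_msgs::SetFollowState>("/turtlebot_follower/change_state");

  turtlebot_msgs::SetFollowState srv;
  // Ask the follower to stop; use Request::FOLLOW to resume following.
  srv.request.state = turtlebot_msgs::SetFollowState::Request::STOPPED;

  if (client.call(srv) && srv.response.result == turtlebot_msgs::SetFollowState::Response::OK)
    ROS_INFO("Follower state changed");
  else
    ROS_ERROR("Failed to change follower state");

  return 0;
}

The same call can also be made from the rosservice command-line tool while testing.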
Next, let's look at the launch file, follower.launch.
Before modifying it, it is better to comment out the original lines rather than delete them.
<launch>
  <arg name="simulation" default="false"/>

  <group unless="$(arg simulation)">
    <include file="$(find turtlebot_follower)/launch/includes/velocity_smoother.launch.xml">
      <arg name="nodelet_manager" value="/mobile_base_nodelet_manager"/>
      <arg name="navigation_topic" value="/cmd_vel_mux/input/navi"/>
    </include>
    <include file="$(find handsfree_hw)/launch/handsfree_hw.launch"/>
    <include file="$(find handsfree_bringup)/launch/xtion_fake_laser_openni2.launch"/>
  </group>

  <group if="$(arg simulation)">
    <node pkg="nodelet" type="nodelet" ns="camera" name="camera_nodelet_manager" args="manager"/>
    <include file="$(find turtlebot_follower)/launch/includes/velocity_smoother.launch.xml">
      <arg name="nodelet_manager" value="camera/camera_nodelet_manager"/>
      <arg name="navigation_topic" value="cmd_vel_mux/input/navi"/>
    </include>
  </group>

  <param name="camera/rgb/image_color/compressed/jpeg_quality" value="22"/>
  <node pkg="topic_tools" type="throttle" name="camera_throttle"
        args="messages camera/rgb/image_color/compressed 5"/>

  <include file="$(find turtlebot_follower)/launch/includes/safety_controller.launch.xml"/>

  <node pkg="nodelet" type="nodelet" name="turtlebot_follower"
        args="load turtlebot_follower/TurtlebotFollower camera/camera_nodelet_manager">
    <remap from="turtlebot_follower/cmd_vel" to="/mobile_base/mobile_base_controller/cmd_vel"/>
    <remap from="depth/points" to="camera/depth/points"/>
    <param name="enabled" value="true"/>
    <param name="x_scale" value="1.5"/>
    <param name="z_scale" value="1.0"/>
    <param name="min_x" value="-0.35"/>
    <param name="max_x" value="0.35"/>
    <param name="min_y" value="0.1"/>
    <param name="max_y" value="0.5"/>
    <param name="max_z" value="1.5"/>
    <param name="goal_z" value="0.6"/>
  </node>

  <node name="switch" pkg="turtlebot_follower" type="switch.py"/>
  <node name="rviz" pkg="rviz" type="rviz" args="-d $(find turtlebot_follower)/follow.rviz"/>
</launch>
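To make these numbers concrete: with the parameter values set in this launch file, the region of interest drawn by publishBbox() is a box 0.7 m wide (min_x = -0.35 to max_x = 0.35), 0.4 m tall (min_y = 0.1 to max_y = 0.5) and 1.5 m deep (0 to max_z), and goal_z = 0.6 means the follower tries to keep the closest detected point about 0.6 m in front of the camera.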
Build the workspace and run follow.launch; it brings up the robot, the camera and rviz, so this single launch file is all you need to run.
In rviz, add two Marker displays plus PointCloud and Camera displays, as shown in the figure:
The topic and frame names must match the ones used in the code.
After adding them, rviz displays the following:
The red dot represents the centroid, and the green box represents the region of interest.
When the red dot lands on us, the robot follows our movement. Note: walk slowly, and tune the robot's movement speed down as well.
When there is no red dot inside the region of interest, the robot stops following until the red dot appears again.
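As a concrete example of the velocity command computed in imagecb(): with the launch values above (goal_z = 0.6, z_scale = 1.0, x_scale = 1.5), a person standing 1.0 m in front of the camera with the centroid offset 0.1 m to the side gives cmd->linear.x = (1.0 - 0.6) * 1.0 = 0.4 m/s and cmd->angular.z = -0.1 * 1.5 = -0.15 rad/s, so the easiest way to slow the robot down is to lower z_scale and x_scale, either in the launch file or through dynamic_reconfigure at run time.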