Python rospy module: sleep() code examples

The following code examples, extracted from open-source Python projects, illustrate how to use rospy.sleep().
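For reference, rospy.sleep() blocks for the given duration, passed either as a float number of seconds or as a rospy.Duration. It follows simulated time when /use_sim_time is set, and raises rospy.ROSInterruptException if the node is shut down while sleeping. A minimal sketch of both a one-off pause and a rate-controlled loop (node and topic names are illustrative):

import rospy
from std_msgs.msg import String

rospy.init_node('sleep_demo')
pub = rospy.Publisher('chatter', String, queue_size=10)

rospy.sleep(2.0)              # one-off pause; also accepts rospy.Duration(2.0)

try:
    rate = rospy.Rate(10)     # for periodic loops, rospy.Rate keeps a steady 10 Hz
    while not rospy.is_shutdown():
        pub.publish('hello')
        rate.sleep()
except rospy.ROSInterruptException:
    pass                      # raised if ROS shuts down during a sleep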

Project: openag_brain | Author: OpenAgInitiative
def test_publish_to_topics(self):
        topic_ending = "desired"
        rospy.logdebug("Sleeping for 5 seconds to let ROS spin up...")
        rospy.sleep(5)
        for variable, value in self.variables:
            # Publish to each variable/desired topic to see if all of the
            # actuators come on as expected.
            topic_string = variable + "/" + topic_ending
            rospy.logdebug("Testing Publishing to " + topic_string)
            pub_desired = rospy.Publisher(topic_string,
                                               Float64, queue_size=10)
            sub_desired = rospy.Subscriber(topic_string, Float64, self.callback)
            rospy.sleep(2)
            print(self.namespace + "/" + topic_string)
            for _ in range(NUM_TIMES_TO_PUBLISH):
                pub_desired.publish(value)
                rospy.sleep(1)
            rospy.sleep(2)
            pub_desired.publish(0)
Project: Story_CoWriting | Author: alexis-jacq
def ros_loop(test):
    while True:

        rospy.Subscriber('human_turn_topic', String, human_turn)
        rospy.Subscriber('human_chosen_topic', String, have_chosen)
        rospy.Subscriber('human_predict_turn_topic', String, human_predict)
        rospy.Subscriber('robot_turn_topic', String, robot_turn)
        rospy.Subscriber('robot_chosen_topic', String, have_chosen)
        rospy.Subscriber('story_telling', String, new_phrase)
        rospy.Subscriber('new_element', String, new_element)

        rospy.sleep(0.1)

    rospy.spin()


################################################
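Note that the loop above re-registers every subscriber on each 0.1-second iteration, and the final rospy.spin() after the infinite while loop is never reached. The usual rospy pattern is to create the subscribers once and then either spin or sleep in a shutdown-aware loop; a sketch reusing the snippet's topic and callback names:

def ros_loop(test):
    # Register each subscriber once; rospy invokes the callbacks from background threads.
    rospy.Subscriber('human_turn_topic', String, human_turn)
    rospy.Subscriber('human_chosen_topic', String, have_chosen)
    rospy.Subscriber('human_predict_turn_topic', String, human_predict)
    rospy.Subscriber('robot_turn_topic', String, robot_turn)
    rospy.Subscriber('robot_chosen_topic', String, have_chosen)
    rospy.Subscriber('story_telling', String, new_phrase)
    rospy.Subscriber('new_element', String, new_element)

    # Block until shutdown, or replace rospy.spin() with a
    # "while not rospy.is_shutdown()" loop around rospy.sleep(0.1)
    # if periodic work is needed between callbacks.
    rospy.spin()
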
Project: promplib | Author: baxter-flowers
def record_motion(self):
        for countdown in ['ready?', 3, 2, 1, "go"]:
            self.say('{}'.format(countdown), blocking=False)
            rospy.sleep(1)
        self.arm.recorder.start(10)
        rospy.loginfo("Recording...")

        choice = ""
        while choice != 'stop' and not rospy.is_shutdown():
            choice = self.read_user_input(['stop'])

        joints, eef = self.arm.recorder.stop()
        self.say('Recorded', blocking=True)
        if len(joints.joint_trajectory.points) == 0:
            self.say('This demo is empty, please retry')
        else:
            target_id = self.promp.add_demonstration(joints, eef)
            self.say('Added to Pro MP {}'.format(target_id), blocking=False)
Project: easy_handeye | Author: IFL-CAMP
def __init__(self, context):
        super(RqtCalibrationMovements, self).__init__(context)
        # Give QObjects reasonable names
        self.setObjectName('LocalMover')

        rospy.sleep(1.0)

        # Process standalone plugin command-line arguments
        from argparse import ArgumentParser
        parser = ArgumentParser()
        # Add argument(s) to the parser.
        parser.add_argument("-q", "--quiet", action="store_true",
                            dest="quiet",
                            help="Put plugin in silent mode")
        args, unknowns = parser.parse_known_args(context.argv())
        if not args.quiet:
            print 'arguments: ', args
            print 'unknowns: ', unknowns

        # Create QWidget
        self._widget = CalibrationMovementsGUI()
        if context.serial_number() > 1:
            self._widget.setWindowTitle(self._widget.windowTitle() + (' (%d)' % context.serial_number()))
        # Add widget to the user interface
        context.add_widget(self._widget)
Project: intera_sdk | Author: RethinkRobotics
def test_light_interface(light_name='head_green_light'):
    """Blinks a desired Light on then off."""
    l = Lights()
    rospy.loginfo("All available lights on this robot:\n{0}\n".format(
                                               ', '.join(l.list_all_lights())))
    rospy.loginfo("Blinking Light: {0}".format(light_name))
    on_off = lambda x: 'ON' if l.get_light_state(x) else 'OFF'
    rospy.loginfo("Initial state: {0}".format(on_off(light_name)))
    # turn on light
    l.set_light_state(light_name, True)
    rospy.sleep(1)
    rospy.loginfo("New state: {0}".format(on_off(light_name)))
    # turn off light
    l.set_light_state(light_name, False)
    rospy.sleep(1)
    rospy.loginfo("New state: {0}".format(on_off(light_name)))
    # turn on light
    l.set_light_state(light_name, True)
    rospy.sleep(1)
    rospy.loginfo("New state: {0}".format(on_off(light_name)))
    # reset output
    l.set_light_state(light_name, False)
    rospy.sleep(1)
    rospy.loginfo("Final state: {0}".format(on_off(light_name)))
Project: intera_sdk | Author: RethinkRobotics
def revalidate(self, timeout, invalidate_state=True, invalidate_config=True):
        """
        invalidate the state and config topics, then wait up to timeout
        seconds for them to become valid again.
        return true if both the state and config topic data are valid
        """
        if invalidate_state:
            self.invalidate_state()
        if invalidate_config:
            self.invalidate_config()
        timeout_time = rospy.Time.now() + rospy.Duration(timeout)
        while not self.is_state_valid() and not rospy.is_shutdown():
            rospy.sleep(0.1)
            if timeout_time < rospy.Time.now():
                rospy.logwarn("Timed out waiting for node interface valid...")
                return False
        return True
Project: dashgo | Author: EAIBOT
def connect(self):
        try:
            print "Connecting to Arduino on port", self.port, "..."
            self.port = Serial(port=self.port, baudrate=self.baudrate, timeout=self.timeout, writeTimeout=self.writeTimeout)
            # The next line is necessary to give the firmware time to wake up.
            time.sleep(1)
            test = self.get_baud()
            if test != self.baudrate:
                time.sleep(1)
                test = self.get_baud()   
                if test != self.baudrate:
                    raise SerialException
            print "Connected at", self.baudrate
            print "Arduino is ready."

        except SerialException:
            print "Serial Exception:"
            print sys.exc_info()
            print "Traceback follows:"
            traceback.print_exc(file=sys.stdout)
            print "Cannot connect to Arduino!"
            os._exit(1)
Project: mhri | Author: mhri
def handle_wait_event(self, goal):
        d = datetime.datetime.fromtimestamp(rospy.get_time())

        query_result_count = 0
        while query_result_count == 0 and self.wait_for_event_server.is_active() == True:
            if self.wait_for_event_server.is_preempt_requested():
                result = WaitForEventResult()
                result.result = False
                result.error_msg = 'The client cancel goal.'
                self.wait_for_event_server.set_preempted(result)
                return result

            for i in range(len(goal.event_name)):
                memory_name = goal.event_name[i]
                memory_query = json.loads(goal.query[i])
                memory_query['time'] = {"$gte": d}

                query_result = self.collector[memory_name].find(memory_query)
                query_result_count += query_result.count()

            rospy.sleep(0.2)

        result = WaitEventResult()
        result.result = True
        self.wait_for_event_server.set_succeeded(result)
Project: mr-gan | Author: Healthcare-Robotics
def zeroData(self):
        self.RGripRFingerForceMean = None
        self.RGripRFingerForceRecent = []
        self.accelMean = None
        self.accelRecent = []
        self.temperatureMean = None
        self.temperatureRecent = []
        self.contactmicMean = None
        self.contactmicRecent = []
        self.zeroing = True
        self.statePublisher.publish('zeroing')
        while self.RGripRFingerForceMean is None or self.accelMean is None or self.temperatureMean is None or self.contactmicMean is None:
            rospy.sleep(0.01)
        self.statePublisher.publish('stop')
        self.zeroing = False
        print 'Data zeroed'
Project: needybot-core | Author: needybot
def setUp(self):
        self.client = UIClient()
        self.node = UINode()
        self.node.start()

        self.msg = None
        '''
        self.instruct_pub = rospy.Publisher(
            nb_channels.Messages.instruct.value,
            String,
            queue_size=10
        )
        '''

        self.subscribe()

        rospy.sleep(0.1)
Project: needybot-core | Author: needybot
def setUp(self, MockSequence):
        server = TaskServer('needybot')
        self.server_client = TaskServerClient('needybot')

        self.boot = MockSequence()
        server.add_boot_sequence(self.boot)

        self.idle = Task("idle")
        self.task_one = Task("task_one")

        self.idle.steps['load'].entered_handler = MagicMock()
        self.idle.instruct = MagicMock()

        self.task_one.steps['load'].entered_handler = MagicMock()
        self.task_one.steps['abort'].entered_handler = MagicMock()
        self.task_one.instruct = MagicMock()

        self.server_client.boot(EmptyRequest())
        rospy.sleep(0.1)
Project: needybot-core | Author: needybot
def monitor(self):
        # Wait until subscriber on instruct message is present
        notified = False
        while not rospy.is_shutdown():
            _, subscribers, _ = Master('/needybot').getSystemState()
            if dict(subscribers).get(nb_channels.Messages.instruct.value) is not None:
                if self.is_connected == False:
                    self.connected_pub.publish()
                self.is_connected = True
            else:
                if self.is_connected or not notified:
                    notified = True
                    self.disconnected_pub.publish()
                self.is_connected = False 

            rospy.sleep(0.1)
Project: srcsim2017 | Author: ZarjRobotics
def neck_control(self, positions, wait=True):
        """ Control all 3 points of the head:
             lowerNeckPitch - 0 to 0.5 - looks down
             neckYaw -  -1.0 to 1.0 - looks left and right
             upperNeckPitch - -0.5 to 0.0 - looks up
        """
        trajectories = []

        for p in positions:
            trajectories.append(self.zarj.make_joint(self.MOVE_TIME, p))

        msg = NeckTrajectoryRosMessage()
        msg.joint_trajectory_messages = trajectories
        msg.unique_id = self.zarj.uid

        self.neck_publisher.publish(msg)
        if wait:
            rospy.sleep(self.MOVE_TIME + 0.1)
Project: srcsim2017 | Author: ZarjRobotics
def set_arm_configuration(self, sidename, joints, movetime = None):
        if sidename == 'left':
            side = ArmTrajectoryRosMessage.LEFT
        elif sidename == 'right':
            side = ArmTrajectoryRosMessage.RIGHT
        else:
            rospy.logerr("Unknown side {}".format(sidename))
            return

        if movetime is None:
            movetime = self.ARM_MOVE_TIME

        for i in range(len(joints)):
            if math.isnan(joints[i]):
                joints[i] = self.last_arms[side][i]

        msg = self.make_arm_trajectory(side, ArmTrajectoryRosMessage.OVERRIDE, movetime, joints)
        self.last_arms[side] = deepcopy(joints)
        log('Setting {} arm to {}'.format(sidename, joints))
        self.arm_trajectory_publisher.publish(msg)
        rospy.sleep(movetime)
Project: srcsim2017 | Author: ZarjRobotics
def test(self):
#       move_arm(self)
#       move_hand(self)
#       self.plot_arms()
        rospy.sleep(0.1)
#       self.mashButton()
        rospy.sleep(0.1)
        self.close_grasp(RIGHT)
        rospy.sleep(0.1)
        self.open_grasp(RIGHT)
        rospy.sleep(0.1)
        self.close_grasp(LEFT)
        rospy.sleep(0.1)
        self.open_grasp(LEFT)
        rospy.sleep(0.1)
        self.close_grasp(LEFT)
Project: srcsim2017 | Author: ZarjRobotics
def start_processing(self):
        """ Start processing data """
        if self.disabled:
            rospy.loginfo("Processing started")
        self.disabled = False

        if self.sub_left is None:
            self.sub_left = rospy.Subscriber(
                "/multisense/camera/left/image_color", Image,
                self.left_color_detection)
            rospy.sleep(0.1)
        if self.sub_right is None:
            self.sub_right = rospy.Subscriber(
                "/multisense/camera/right/image_color", Image,
                self.right_color_detection)
            rospy.sleep(0.1)
        if self.sub_cloud is None:
            self.sub_cloud = rospy.Subscriber(
                "/multisense/image_points2_color", PointCloud2,
                self.stereo_cloud)
Project: srcsim2017 | Author: ZarjRobotics
def run(self, distance, angle, snap_to, options):
        """ Run our code """
        rospy.loginfo("Start test code")
        self.task_subscriber = rospy.Subscriber("/srcsim/finals/task", Task, self.task_status)

        rate = rospy.Rate(1) # 1 Hz
        if self.task_subscriber.get_num_connections() == 0:
            rospy.loginfo('waiting for task publisher...')
            while self.task_subscriber.get_num_connections() == 0:
                rate.sleep()

        if distance > 0.0001 or distance < -0.005:
            self.zarj_os.walk.forward(distance, True)
            while not self.zarj_os.walk.walk_is_done():
                rospy.sleep(0.01)

        if abs(angle) > 0.0 or "turn" in options:
            align = "align" in options
            small_forward = 0.005 if align else None
            self.zarj_os.walk.turn(angle, snap_to, align_to_snap = align, small_forward = small_forward)
            while not self.zarj_os.walk.walk_is_done():
                rospy.sleep(0.01)
Project: srcsim2017 | Author: ZarjRobotics
def start(self, _=None):
        """ Start the macro """
        joints = deepcopy(LimbTypes.arm_styles['pass_football'])
        joints = LimbTypes.invert_arm_configuration('right', joints)
        log("Setting right hand to pass a football")
        self.fc.zarj.hands.set_arm_configuration('right', joints)

        joints = deepcopy(LimbTypes.arm_styles['tuck'])
        joints = LimbTypes.invert_arm_configuration('left', joints)
        log("Tucking left arm, setting left hand to opposition")
        self.fc.zarj.hands.set_arm_configuration('left', joints)

        joints = deepcopy(LimbTypes.hand_styles['oppose'])
        joints = LimbTypes.invert_hand_configuration('left', joints)
        self.fc.zarj.hands.set_hand_configuration('left', joints)

        log("Looking down")
        self.fc.zarj.neck.neck_control([0.5, 0, 0], True)

        rospy.sleep(0.5)

        self.fc.send_stereo_camera()
        log("Pick the two top corners of the array handle.")
        self.done = True
Project: srcsim2017 | Author: ZarjRobotics
def start(self, _=None):
        """ Start the macro """

        joints = deepcopy(LimbTypes.hand_styles['open'])
        joints = LimbTypes.invert_hand_configuration('left', joints)
        self.fc.zarj.hands.set_hand_configuration('left', joints)

        log("Looking down and left")
        self.fc.zarj.neck.neck_control([0.5, 1.0, 0], True)

        rospy.sleep(0.5)

        self.fc.clear_points()
        if not self.chain_okay:
            self.fc.send_stereo_camera()
            log("Click the button.")

        self.done = True
Project: srcsim2017 | Author: ZarjRobotics
def start(self, _=None):
        """ Start the macro """
        anchor = self.fc.assure_anchor("center")
        if anchor is None:
            raise ZarjConfused("Cannot find the choke")

        x = anchor.adjusted[0]
        y = anchor.adjusted[1] + (.05 * math.sin(math.radians(anchor.angle)))
        z = anchor.adjusted[2] + 0.14

        log("Commanding hand above cable end")
        msg = ZarjMovePalmCommand('right', False, x, y, z, -1 * anchor.angle, 20, -90, True)
        self.fc.process_palm_msg(msg)
        self.done = True

        log("Giving the hand a long time to settle down.")
        rospy.sleep(3.0)
Project: srcsim2017 | Author: ZarjRobotics
def start(self, _=None):
        """ Start the macro """
        anchor = self.fc.assure_anchor("plugin_start")
        if anchor is None:
            raise ZarjConfused("Cannot find the plug")

        x = anchor.adjusted[0]
        y = anchor.adjusted[1] + 0.05
        z = anchor.adjusted[2]

        log("Commanding hand above and left of plug; x {}, y {}, z {}".format(x, y, z))
        msg = ZarjMovePalmCommand('right', False, x, y, z, 0, 0, 45, True)
        self.fc.process_palm_msg(msg)
        self.done = True

        log("Giving the hand a long time to settle down.")
        rospy.sleep(3.0)
Project: srcsim2017 | Author: ZarjRobotics
def _spin_wheel(self):
        """ Spin the wheel. """
        joints = deepcopy(LimbTypes.arm_styles['door_out'])
        joints = LimbTypes.invert_arm_configuration('left', joints)
        self.fc.zarj.hands.set_arm_configuration('left', joints, 1.0)
        rospy.sleep(0.5)

        joints = deepcopy(LimbTypes.arm_styles['door_top'])
        joints = LimbTypes.invert_arm_configuration('left', joints)
        self.fc.zarj.hands.set_arm_configuration('left', joints, 1.0)
        rospy.sleep(0.5)

        joints = deepcopy(LimbTypes.arm_styles['door_bottom'])
        joints = LimbTypes.invert_arm_configuration('left', joints)
        self.fc.zarj.hands.set_arm_configuration('left', joints, 1.5)
        rospy.sleep(0.5)
Project: srcsim2017 | Author: ZarjRobotics
def start(self, _=None):
        """ Start the macro """
        joints = deepcopy(LimbTypes.arm_styles['pass_football'])
        joints = LimbTypes.invert_arm_configuration('right', joints)
        log("Setting right hand to pass a football")
        self.fc.zarj.hands.set_arm_configuration('right', joints)

        joints = deepcopy(LimbTypes.arm_styles['king_tut'])
        joints = LimbTypes.invert_arm_configuration('left', joints)
        log("Extending left arm, setting left hand to grab down")
        self.fc.zarj.hands.set_arm_configuration('left', joints)

        joints = deepcopy(LimbTypes.hand_styles['open_down'])
        joints = LimbTypes.invert_hand_configuration('left', joints)
        self.fc.zarj.hands.set_hand_configuration('left', joints)

        rospy.sleep(0.5)

        self.fc.send_stereo_camera()
        log("Pick two points along the _GRIP_, long way (antenna <--> base) of "
            "the detector. Generally just a little in toward the robot from "
            "the center line")
Project: srcsim2017 | Author: ZarjRobotics
def start(self, _=None):
        """ Start the macro """
        anchor = self.fc.assure_anchor("center")
        if anchor is None:
            return False

        x = anchor.adjusted[0]
        y = anchor.adjusted[1]
        z = 0.838 + 0.1

        log("Commanding hand to position just above object")
        msg = ZarjMovePalmCommand('left', False, x, y, z, -1 * anchor.angle, 25, 90, True)
        self.fc.process_palm_msg(msg)

        log("Giving it a while to settle")
        rospy.sleep(3.0)

        log("Commanding hand onto object")
        msg = ZarjMovePalmCommand('left', False, x, y, z - .08, -1 * anchor.angle, 25, 90, True)
        self.fc.process_palm_msg(msg)

        rospy.sleep(0.5)
Project: master_robot_strage | Author: nwpu-basketball-robot
def execute(self, ud):
        rospy.loginfo('Start Return to home!!')
        if self.preempt_requested():
            self.service_preempt()
            return 'failed'
        (current_x,current_y) = self.cmd_position.get_robot_current_x_y()
        self.cmd_move.move_to(x = 2-current_x,y = -1.5-current_y)
        # self.cmd_move.move_to(y =  -current_y)
        self.cmd_turn.turn_to(math.pi)
        rospy.sleep(0.5)
        self.cmd_return.go_close_line()
        # self.cmd_move.move_to(x = -2.6)

        return 'successed'

Project: master_robot_strage | Author: nwpu-basketball-robot
def execute(self, ud):
        rospy.loginfo("Start Shovel ball!!!!!")
        if self.preempt_requested():
            self.service_preempt()
            return 'failed'
        self.cmd_shovel.control_shovel(control_type=0)
        rospy.sleep(0.5)
        return 'successed'



############################################
###########pass_ball_first##################
############################################

Project: master_robot_strage | Author: nwpu-basketball-robot
def execute(self, ud):
        if self.preempt_requested():
            self.service_preempt()
            return 'failed'
        rospy.loginfo("x = %s"%ud.column_x)
        rospy.loginfo("theta = %s"%ud.column_theta)
        self.move_cmd.move_to(x = ud.column_x - 2.3)
        self.move_cmd.move_to( yaw=ud.column_theta)

        rospy.sleep(1)
        (x,column_theta) = find_cylinder_state.find_cylinder_state().find_cylinder()
        # self.move_cmd.move_to(x - 2.3)
        self.move_cmd.move_to( yaw= column_theta)
        return 'successed'


############################################
###############Find Ball####################
############################################

# Find-ball state:
# ball_x     -- x coordinate of the detected ball
# ball_y     -- y coordinate of the detected ball
# ball_theta -- angle of the ball relative to the x axis
Project: master_robot_strage | Author: nwpu-basketball-robot
def execute(self, ud):
        if self.preempt_requested():
            self.service_preempt()
            return 'failed'
        rospy.sleep(0.5)
        (x,y,theta) = find_volleyball.find_volleyball().find_volleyball()
        rospy.loginfo("x = %s,y = %s ",x,y)
        self.move_cmd.turn_to(theta*0.95)
        self.move_cmd.move_to(x = math.sqrt(x*x+y*y) - 0.25)
        # self.move_cmd.move_to(y = y)
        # self.move_cmd.move_to(x = x - 0.2 )
        return 'successed'


############################################
################Control#####################
############################################
Project: nachi_project | Author: Nishida-Lab
def waitForInitialPose(self, next_topic, timeout=None):
        counter = 0
        while not rospy.is_shutdown():
            counter = counter + 1
            if timeout and counter >= timeout:
                return False
            try:
                self.marker_lock.acquire()
                self.initialize_poses = True
                topic_suffix = next_topic.split("/")[-1]
                if self.initial_poses.has_key(topic_suffix):
                    self.pre_pose = PoseStamped(pose=self.initial_poses[topic_suffix])
                    self.initialize_poses = False
                    return True
                else:
                    rospy.logdebug(self.initial_poses.keys())
                    rospy.loginfo("Waiting for pose topic of '%s' to be initialized",
                                  topic_suffix)
                    rospy.sleep(1)
            finally:
                self.marker_lock.release()
Project: follow_waypoints | Author: danielsnider
def __init__(self):
        State.__init__(self, outcomes=['success'], input_keys=['waypoints'], output_keys=['waypoints'])
        # Create publsher to publish waypoints as pose array so that you can see them in rviz, etc.
        self.poseArray_publisher = rospy.Publisher('/waypoints', PoseArray, queue_size=1)

        # Start thread to listen for reset messages to clear the waypoint queue
        def wait_for_path_reset():
            """thread worker function"""
            global waypoints
            while not rospy.is_shutdown():
                data = rospy.wait_for_message('/path_reset', Empty)
                rospy.loginfo('Received path RESET message')
                self.initialize_path_queue()
                rospy.sleep(3) # Wait 3 seconds because `rostopic echo` latches
                               # for three seconds and wait_for_message() in a
                               # loop will see it again.
        reset_thread = threading.Thread(target=wait_for_path_reset)
        reset_thread.start()
Project: cu-perception-manipulation-stack | Author: correlllab
def zero_sensor(self):
        rospy.loginfo("Zeroing sensor...")
        # Wait a little bit until we get a message from the sensor
        rospy.sleep(1)
        self.tip_offset, self.inside_offset = (np.zeros_like(self.tip),
                                               np.zeros_like(self.inside))
        inside_vals, tip_vals = [], []
        r = rospy.Rate(10)
        while not rospy.is_shutdown() and len(inside_vals) < 10:
            inside, tip = self.inside, self.tip
            # If there are zero values (most likely becase a message
            # has not yet been received), skip that. We could also
            # initialize them with nans to find out if there's a
            # problem
            if all(inside) and all(tip):
                inside_vals.append(inside)
                tip_vals.append(tip)
            r.sleep()
        # Center around 5000, so ranges are similar to when not centering
        self.inside_offset = np.min(inside_vals, axis=0) - 5000
        self.tip_offset = np.min(tip_vals, axis=0) - 5000
        rospy.loginfo("Zeroing finished")
Project: cu-perception-manipulation-stack | Author: correlllab
def y_block_side_scan(self, pose, direction=(0,0,1), y_dir=0.005, sensor_index=13, max_inside = 1000, timeout=50):    
        while True:
            if(not self.sensors_updated()):
                return
            #rospy.loginfo("Moving to touch (at {})".format(self.inside[6]))
            if self.inside[sensor_index] < max_inside:
                return
            else:
                scaled_direction = (di / 100 for di in direction)
                #print("Scaled direction: ", scaled_direction)
                v_cartesian = self._vector_to(scaled_direction)
                v_cartesian[0] = y_dir
                #print("cartesian: ", v_cartesian)
                v_joint = self.compute_joint_velocities(v_cartesian)
                #print("joint    : ", v_joint)
                self.limb.set_joint_velocities(v_joint)
                rospy.sleep(0.25)
Project: cu-perception-manipulation-stack | Author: correlllab
def limb_pose(limb_name):
    """Get limb pose at time of OK cuff button press."""
    button = CuffOKButton(limb_name)
    rate = rospy.Rate(20)  # rate at which we check whether button was
                           # pressed or not
    rospy.loginfo(
        'Waiting for %s OK cuff button press to save pose' % limb_name)
    while not button.pressed and not rospy.is_shutdown():
        rate.sleep()
    joint_pose = baxter_interface.Limb(limb_name).joint_angles()
    # Now convert joint coordinates to end effector cartesian
    # coordinates using forward kinematics.
    kinematics = baxter_kinematics(limb_name)
    endpoint_pose = kinematics.forward_position_kinematics(joint_pose)
    # How is this different from
    # baxter_interface.Limb(limb_name).endpoint_pose()
    print()
    print('baxter_interface endpoint pose:')
    print(baxter_interface.Limb(limb_name).endpoint_pose())
    print('pykdl forward kinematics endpoint pose:')
    print(endpoint_pose)
    print()
    return endpoint_pose
Project: cu-perception-manipulation-stack | Author: correlllab
def pick(self, pose, direction=(0, 0, 1), distance=0.1, controller=None):
        """Go to pose + pick_direction * pick_distance, open, go to pose,
        close, go to pose + pick_direction * pick_distance.

        """
        print(pose)
        pregrasp_pose = self.translate(pose, direction, distance)
        print(pregrasp_pose)
        self.limb.set_joint_position_speed(0.1)
        self.move_ik(pregrasp_pose)
        # We want to block end effector opening so that the next
        # movement happens with the gripper fully opened.
        self.gripper.open(block=True)
        self.limb.set_joint_position_speed(0.05)
        self.move_ik(pose)
        if controller is not None:
            print ('controller ON!!')
            controller.enable()
            rospy.sleep(5)
            controller.disable()
        self.gripper.close(block=True)
        #self.gripper.command_position(45, block=True)
        rospy.sleep(2)
        #self.move_ik(pregrasp_pose)
Project: cu-perception-manipulation-stack | Author: correlllab
def pick(self, pose, direction=(0, 0, 1), distance=0.1, controller=None):
        """Go to pose + pick_direction * pick_distance, open, go to pose,
        close, go to pose + pick_direction * pick_distance.

        """
        rospy.logdebug("pick pose: %s" % pose)
        pregrasp_pose = self.translate(pose, direction, distance)
        rospy.logdebug("pregrasp_pose: %s" % pregrasp_pose)
        rospy.sleep(1)
        self.move_ik(pregrasp_pose)
        # We want to block end effector opening so that the next
        # movement happens with the gripper fully opened.
        self.gripper.open(block=True)
        self.move_ik(pose)
        if controller is not None:
            print ('controller ON!!')
            controller.enable()
            rospy.sleep(5)
            controller.disable()
        self.gripper.close(block=True)
        self.move_ik(pregrasp_pose)
Project: cu-perception-manipulation-stack | Author: correlllab
def main(limb_name, reset):
    """
    Parameters
    ----------
    limb : str
        Which limb to use. Choices are {'left', 'right'}
    reset : bool
        Whether to use previously saved picking and placing poses or
        to save new ones by using 0g mode and the OK cuff buttons.
    """
    # Initialise ros node
    rospy.init_node("pick_and_place", anonymous=False)


    b = Baxter(limb_name)
    place_pose = limb_pose(limb_name).tolist()
    print (place_pose)
    block = Blocks()
    rospy.sleep(4)
    pick_pose = block.pose
    rospy.loginfo('Block pose: %s' % pick_pose)
    #import ipdb; ipdb.set_trace()
    b.pick(pick_pose, controller=None)
    b.place(place_pose)
Project: cu-perception-manipulation-stack | Author: correlllab
def limb_pose(limb_name):
    """Get limb pose at time of OK cuff button press."""
    button = CuffOKButton(limb_name)
    rate = rospy.Rate(20)  # rate at which we check whether button was
                           # pressed or not
    rospy.loginfo(
        'Waiting for %s OK cuff button press to save pose' % limb_name)
    while not button.pressed and not rospy.is_shutdown():
        rate.sleep()
    joint_pose = baxter_interface.Limb(limb_name).joint_angles()
    # Now convert joint coordinates to end effector cartesian
    # coordinates using forward kinematics.
    kinematics = baxter_kinematics(limb_name)
    endpoint_pose = kinematics.forward_position_kinematics(joint_pose)
    #print (endpoint_pose)
    return endpoint_pose
Project: cu-perception-manipulation-stack | Author: correlllab
def goto(self, from_frame, to_frame):
        '''
        Calculates the transfrom from from_frame to to_frame
        and commands the arm in cartesian mode.
        '''
        try:
            trans = self.tfBuffer.lookup_transform(from_frame, to_frame, rospy.Time())
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException):
            rate.sleep()
            # continue

        translation  = [trans.transform.translation.x, trans.transform.translation.y, trans.transform.translation.z]
        rotation = [trans.transform.rotation.x, trans.transform.rotation.y, trans.transform.rotation.z, trans.transform.rotation.w]
        pose_value = translation + rotation
        #second arg=0 (absolute movement), arg = '-r' (relative movement)
        self.cmmnd_CartesianPosition(pose_value, 0)
Project: cu-perception-manipulation-stack | Author: correlllab
def cmmnd_makeContact_USBPlug(self, sensitivity):
        rate = rospy.Rate(100)
        while (self.bump_finger_1 < sensitivity)and not rospy.is_shutdown():
            print (self.bump_finger_1)
            self.cmmnd_CartesianVelocity([-0.03,0,0,0,0,0,1])
            rate.sleep()
        print ("contact made with the ground")

    # def pick_USBlight_1(self, current_finger_position):
    #     ii = 0
    #     rate = rospy.Rate(100)
    #     while self.touch_finger_3 != True and not rospy.is_shutdown():
    #         current_finger_position[0] += 5 # slowly close finger_1 until contact is made
    #         print (current_finger_position[0])
    #         self.cmmnd_FingerPosition([current_finger_position[0], current_finger_position[1], current_finger_position[2]])
    #         rate.sleep()
    #     self.touch_finger_1 = False
    #     return current_finger_position
Project: cu-perception-manipulation-stack | Author: correlllab
def pick(self, pose, direction=(0, 0, 1), distance=0.1):
        """Go to pose + pick_direction * pick_distance, open, go to pose,
        close, go to pose + pick_direction * pick_distance.

        """
        pregrasp_pose = self.translate(pose, direction, distance)

        self.move_ik(pregrasp_pose)
        rospy.sleep(0.5)
        # We want to block end effector opening so that the next
        # movement happens with the gripper fully opened. In Baxter,
        # that involves sublcassing the gripper class
        self.gripper.open()
        self.move_ik(pose)
        rospy.sleep(0.5)
        self.gripper.close()
        rospy.sleep(0.5)
        self.move_ik(pregrasp_pose)
Project: gps | Author: cbfinn
def run_trial_tf(self, policy, time_to_run=5):
        """ Run an async controller from a policy. The async controller receives observations from ROS subscribers
         and then uses them to publish actions."""
        should_stop = False
        consecutive_failures = 0
        start_time = time.time()
        while should_stop is False:
            if self.observations_stale is False:
                consecutive_failures = 0
                last_obs = tf_obs_msg_to_numpy(self._tf_subscriber_msg)
                action_msg = tf_policy_to_action_msg(self.dU,
                                                     self._get_new_action(policy, last_obs),
                                                     self.current_action_id)
                self._tf_publish(action_msg)
                self.observations_stale = True
                self.current_action_id += 1
            else:
                rospy.sleep(0.01)
                consecutive_failures += 1
                if time.time() - start_time > time_to_run and consecutive_failures > 5:
                    # we only stop when we have run for the trial time and are no longer receiving obs.
                    should_stop = True
        rospy.sleep(0.25)  # wait for finished trial to come in.
        result = self._trial_service._subscriber_msg
        return result  # the trial has completed. Here is its message.
Project: tbotnav | Author: patilnabhi
def __init__(self):
        self.node_name = "face_recog_fisher"
        rospy.init_node(self.node_name)

        rospy.on_shutdown(self.cleanup)
        self.bridge = CvBridge()
        self.face_names = StringArray()
        self.all_names = StringArray()

        self.size = 4
        face_haar = 'haarcascade_frontalface_default.xml'
        self.haar_cascade = cv2.CascadeClassifier(face_haar)
        self.face_dir = 'face_data_fisher'
        self.model = cv2.createFisherFaceRecognizer()
        # self.model = cv2.createEigenFaceRecognizer()

        (self.im_width, self.im_height) = (112, 92)        

        rospy.loginfo("Loading data...")
        # self.fisher_train_data()
        self.load_trained_data()
        rospy.sleep(3)        

        # self.img_sub = rospy.Subscriber("/asus/rgb/image_raw", Image, self.img_callback)
        self.img_sub = rospy.Subscriber("/usb_cam/image_raw", Image, self.img_callback)

        # self.img_pub = rospy.Publisher('face_img', Image, queue_size=10)
        self.name_pub = rospy.Publisher('face_names', StringArray, queue_size=10)
        self.all_names_pub = rospy.Publisher('all_face_names', StringArray, queue_size=10)
        rospy.loginfo("Detecting faces...")
Project: tbotnav | Author: patilnabhi
def find_station(self, station_id):
        station_loc = []
        if station_id < 2:
            return [0.0, 0.0]
        station_loc = self.qr_tag_loc(station_id)
        count=0
        while not station_loc:
            if count == 12:
                break
            self.rotate_tbot(90.0)
            rospy.sleep(4)
            station_loc = self.qr_tag_loc(station_id)
            # print station_loc
            # rospy.sleep(3)
            count += 1
        return station_loc
Project: tbotnav | Author: patilnabhi
def find_person(self, name):
        # cv2.imshow("Face Image", self.face_img)
        # cv2.waitKey(3)

        count=0
        found = False
        while count < 6 and found != True:
            for i in range(len(self.face_names)):
                if self.face_names[i] == name:
                    print self.face_names[i]
                    return True
                    break
            count += 1
            self.rotate_tbot(180.0, 45.0/2)
            rospy.sleep(5)
            # print count
        return found
Project: APEX | Author: ymollard
def go_to_start(self, duration=5):
        d = {"abs_z": 0,
             "bust_y": 0,
             "bust_x": 0,
             "head_z": 0,
             "head_y": 20,
             "l_shoulder_y": 0,
             "l_shoulder_x": 0,
             "l_arm_z": 20,
             "l_elbow_y": 0,
             "r_shoulder_y": 0,
             "r_shoulder_x": 0,
             "r_arm_z": 0,
             "r_elbow_y": 0}
        self.torso.set_compliant(False)
        self.torso.reach(d, duration)
        rospy.sleep(duration)
Project: APEX | Author: ymollard
def execute_iteration(self, task, method, iteration, trial, num_iterations):
        rospy.logwarn("Controller starts iteration {} {}/{} trial {}".format(method, iteration, num_iterations, trial))
        rospy.wait_for_service('ergo/reset')  # Ensures Ergo keeps working or wait till it reboots

        # After resuming, we keep the same iteration
        if self.perception.has_been_pressed('buttons/help'):
            rospy.sleep(1.5)  # Wait for the robot to fully stop
            self.recorder.record(task, method, trial, iteration)
            self.perception.switch_led('button_leds/pause', True)
            recording = self.perception.record(human_demo=True, nb_points=self.params['nb_points'])
            self.torso.set_torque_max(self.torso_params['torques']['reset'])
            self.torso.reset(slow=True)
            return True
        else:
            trajectory = self.learning.produce(skill_to_demonstrate=self.demonstrate).torso_trajectory
            self.torso.set_torque_max(self.torso_params['torques']['motion'])
            self.recorder.record(task, method, trial, iteration) 
            self.perception.switch_led('button_leds/pause', True)
            self.torso.execute_trajectory(trajectory)  # TODO: blocking, non-blocking, action server?
            recording = self.perception.record(human_demo=False, nb_points=self.params['nb_points'])
            self.perception.switch_led('button_leds/pause', False)
            recording.demo.torso_demonstration = JointTrajectory()
            self.torso.set_torque_max(80)
            self.torso.reset(slow=False)
            return self.learning.perceive(recording.demo)
Project: APEX | Author: ymollard
def run(self):
        self.go_to_start()
        self.last_activity = rospy.Time.now()
        self.srv_reset = rospy.Service('ergo/reset', Reset, self._cb_reset)
        rospy.loginfo('Ergo is ready and starts joystick servoing...')
        self.t = rospy.Time.now()

        while not rospy.is_shutdown():
            now = rospy.Time.now()
            self.delta_t = (now - self.t).to_sec()
            self.t = now

            self.go_or_resume_standby()
            self.servo_robot(self.joy_y, self.joy_x)
            self.publish_state()
            self.publish_button()

            # Update the last activity
            if abs(self.joy_x) > self.params['min_joy_activity'] or abs(self.joy_y) > self.params['min_joy_activity']:
                self.last_activity = rospy.Time.now()

            self.rate.sleep()
Project: APEX | Author: ymollard
def _read(self, max_attempts=600):
        got_image = False
        if self._camera is not None and self._camera.isOpened():
            got_image, image = self._camera.read()

        if not got_image:
            if not self._error:
                if max_attempts > 0:
                    rospy.sleep(0.1)
                    self._open()
                    return self._read(max_attempts-1)
                rospy.logerr("Reached maximum camera reconnection attempts, abandoning!")
                self._error = True
                return False, None
            return False, None
        return True, image