Cartesian control API

Method list

class ALMotionProxy

Deprecated Methods

Methods

void ALMotionProxy::positionInterpolation(const std::string& effectorName, const int& frame, const AL::ALValue& path, const int& axisMask, const AL::ALValue& durations, const bool& isAbsolute)

Deprecated since version 1.16: use ALMotionProxy::positionInterpolations instead.

NAO only

Moves an end-effector to the given position and orientation over time. This is a blocking call.

Parameters:
  • effectorName – Name of the effector. Could be: “Torso” or a chain name. Use ALMotionProxy::getBodyNames for the list of chains supported on your robot.
  • frame – Task frame {FRAME_TORSO = 0, FRAME_WORLD = 1, FRAME_ROBOT = 2}.
  • path – Vector of 6D position arrays (x, y, z, wx, wy, wz) in meters and radians.
  • axisMask – Axis mask. Bit set to 1 for each axis that you wish to control, e.g. 7 for position only, 56 for rotation only, 63 for both.
  • durations – Vector of times in seconds corresponding to the path points.
  • isAbsolute – If true, the movement is absolute; otherwise it is relative.
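
In the axis mask, bits 0–2 select the x, y, z translations and bits 3–5 the wx, wy, wz rotations, which is why 7 means position only, 56 rotation only and 63 both. Below is a minimal, hypothetical sketch of this deprecated call (not an official sample), assuming a qi session connected as in the examples later on this page and that the deprecated method is still exposed on the ALMotion service:

import motion

def nudge_torso_up(session, dz=0.02):
    """Hypothetical helper: raise the torso by dz meters (relative move)."""
    motion_service = session.service("ALMotion")
    frame      = motion.FRAME_ROBOT
    path       = [[0.0, 0.0, dz, 0.0, 0.0, 0.0]]  # one 6D point: +dz on z only
    axisMask   = 7        # bits 0-2: control x, y, z only
    durations  = [2.0]    # reach the point after 2 seconds
    isAbsolute = False    # interpret the path point as an offset
    # Deprecated since 1.16; prefer ALMotionProxy::positionInterpolations.
    motion_service.positionInterpolation("Torso", frame, path, axisMask,
                                         durations, isAbsolute)
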
void ALMotionProxy::positionInterpolations(const AL::ALValue& effectorNames, const AL::ALValue& frames, const AL::ALValue& paths, const AL::ALValue& axisMasks, const AL::ALValue& relativeTimes)

NAO only

There are two overloads of this function:

Moves end-effectors to the given positions and orientations over time. This is a blocking call.

Parameters:
  • effectorNames – Vector of effector names. Could be: “Torso” or a chain name. Use ALMotionProxy::getBodyNames for the list of chains supported on your robot.
  • frames – Vector of task frames {FRAME_TORSO = 0, FRAME_WORLD = 1, FRAME_ROBOT = 2}.
  • paths – Vector of absolute 6D position arrays (x, y, z, wx, wy, wz) in meters and radians.
  • axisMasks – Vector of axis masks. Bit set to 1 for each axis that you wish to control, e.g. 7 for position only, 56 for rotation only, 63 for both.
  • relativeTimes – Vector of times in seconds corresponding to the path points.

almotion_positionInterpolations.py

#! /usr/bin/env python
# -*- encoding: UTF-8 -*-

"""Example: Use positionInterpolations Method"""

import qi
import argparse
import sys
import almath
import motion


def main(session):
    """
    This example uses the positionInterpolations method.
    """
    # Get the services ALMotion & ALRobotPosture.

    motion_service  = session.service("ALMotion")
    posture_service = session.service("ALRobotPosture")

    # Wake up robot
    motion_service.wakeUp()

    # Send robot to Pose Init
    posture_service.goToPosture("StandInit", 0.5)

    # Example showing how to use positionInterpolations
    frame = motion.FRAME_ROBOT
    useSensorValues = False

    dx = 0.03 # translation axis X (meters)
    dy = 0.04 # translation axis Y (meters)

    # Motion of Arms with block process
    effectorList = []
    pathList     = []

    axisMaskList = [motion.AXIS_MASK_VEL, motion.AXIS_MASK_VEL]
    timeList     = [[1.0], [1.0]]         # seconds

    effectorList.append("LArm")
    currentPos = motion_service.getPosition("LArm", frame, useSensorValues)
    targetPos = almath.Position6D(currentPos)
    targetPos.y -= dy
    pathList.append(list(targetPos.toVector()))

    effectorList.append("RArm")
    currentPos = motion_service.getPosition("RArm", frame, useSensorValues)
    targetPos = almath.Position6D(currentPos)
    targetPos.y += dy
    pathList.append(list(targetPos.toVector()))

    motion_service.positionInterpolations(effectorList, frame, pathList,
                                 axisMaskList, timeList)

    # Motion of Arms and Torso with block process
    axisMaskList = [motion.AXIS_MASK_VEL,
                    motion.AXIS_MASK_VEL,
                    motion.AXIS_MASK_ALL]
    timeList    = [[4.0],
                    [4.0],
                    [1.0, 2.0, 3.0, 4.0]] # seconds

    effectorList = []
    pathList     = []

    effectorList.append("LArm")
    pathList.append([motion_service.getPosition("LArm", frame, useSensorValues)])

    effectorList.append("RArm")
    pathList.append([motion_service.getPosition("RArm", frame, useSensorValues)])

    effectorList.append("Torso")
    torsoList  = []
    currentPos = motion_service.getPosition("Torso", frame, useSensorValues)
    targetPos  = almath.Position6D(currentPos)
    targetPos.y += dy
    torsoList.append(list(targetPos.toVector()))

    targetPos = almath.Position6D(currentPos)
    targetPos.x -= dx
    torsoList.append(list(targetPos.toVector()))

    targetPos = almath.Position6D(currentPos)
    targetPos.y -= dy
    torsoList.append(list(targetPos.toVector()))

    targetPos = almath.Position6D(currentPos)
    torsoList.append(list(targetPos.toVector()))
    pathList.append(torsoList)

    motion_service.positionInterpolations(effectorList, frame, pathList,
                                 axisMaskList, timeList)

    # Go to rest position
    motion_service.rest()


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--ip", type=str, default="127.0.0.1",
                        help="Robot IP address. On robot or Local Naoqi: use '127.0.0.1'.")
    parser.add_argument("--port", type=int, default=9559,
                        help="Naoqi port number")

    args = parser.parse_args()
    session = qi.Session()
    try:
        session.connect("tcp://" + args.ip + ":" + str(args.port))
    except RuntimeError:
        print ("Can't connect to Naoqi at ip \"" + args.ip + "\" on port " + str(args.port) +".\n"
               "Please check your script arguments. Run with -h option for help.")
        sys.exit(1)
    main(session)
void ALMotionProxy::positionInterpolations(const std::vector<std::string>& effectorNames, const int& frame, const AL::ALValue& paths, const AL::ALValue& axisMasks, const AL::ALValue& relativeTimes, const bool& isAbsolute)

Deprecated since version 1.16: use ALMotionProxy::positionInterpolations instead.

NAO only

Moves end-effectors to the given positions and orientations over time. This is a blocking call.

Parameters:
  • effectorNames – Vector of effector names. Could be: “Torso” or a chain name. Use ALMotionProxy::getBodyNames for the list of chains supported on your robot.
  • frame – Task frame {FRAME_TORSO = 0, FRAME_WORLD = 1, FRAME_ROBOT = 2}.
  • paths – Vector of 6D position arrays (x, y, z, wx, wy, wz) in meters and radians.
  • axisMasks – Vector of axis masks. Bit set to 1 for each axis that you wish to control, e.g. 7 for position only, 56 for rotation only, 63 for both.
  • relativeTimes – Vector of times in seconds corresponding to the path points.
  • isAbsolute – If true, the movement is absolute; otherwise it is relative.
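
A minimal, hypothetical sketch of this deprecated overload (not an official sample), assuming a connected qi session as in the example above; with isAbsolute set to False the two arm paths are read as offsets:

import motion

def arms_to_midline(session, dy=0.04):
    """Hypothetical helper: move each arm dy meters toward the robot's midline."""
    motion_service = session.service("ALMotion")
    frame          = motion.FRAME_ROBOT
    paths          = [[[0.0, -dy, 0.0, 0.0, 0.0, 0.0]],   # LArm: one relative point
                      [[0.0,  dy, 0.0, 0.0, 0.0, 0.0]]]   # RArm: one relative point
    axisMasks      = [7, 7]           # position only for both arms
    relativeTimes  = [[1.5], [1.5]]   # seconds
    isAbsolute     = False
    # Deprecated since 1.16; the overload above takes absolute paths instead.
    motion_service.positionInterpolations(["LArm", "RArm"], frame, paths,
                                          axisMasks, relativeTimes, isAbsolute)
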
void ALMotionProxy::setPositions(const AL::ALValue& effectorNames, const AL::ALValue& frame, const AL::ALValue& position, const float& fractionMaxSpeed, const AL::ALValue& axisMask)

NAO only

Moves an end-effector to the given position and orientation. This is a non-blocking call.

Parameters:
  • effectorNames – Name or names of the effectors. Could be: “Torso” or a chain name. Use ALMotionProxy::getBodyNames for the list of chains supported on your robot.
  • frame – Task frame or list of task frames {FRAME_TORSO = 0, FRAME_WORLD = 1, FRAME_ROBOT = 2}.
  • position – Position6D array (x, y, z, wx, wy, wz) in meters and radians.
  • fractionMaxSpeed – The fraction of maximum speed to use.
  • axisMask – Axis mask or list of axis masks. Bit set to 1 for each axis that you wish to control, e.g. 7 for position only, 56 for rotation only, 63 for both.

almotion_setpositions.cpp

#include <iostream>
#include <alproxies/almotionproxy.h>
#include <alproxies/alrobotpostureproxy.h>
#include <qi/os.hpp>

int main(int argc, char **argv)
{
  std::string robotIp = "127.0.0.1";

  if (argc < 2) {
    std::cerr << "Usage: almotion_setpositions robotIp "
              << "(optional default \"127.0.0.1\")."<< std::endl;
  }
  else {
    robotIp = argv[1];
  }

  AL::ALMotionProxy motion(robotIp);
  AL::ALRobotPostureProxy robotPosture(robotIp);

  robotPosture.goToPosture("StandInit", 0.5f);

  // Example showing how to set Torso Position, using a fraction of max speed
  std::string chainName  = "Torso";
  int space              = 2;          // FRAME_ROBOT
  std::vector<float> position(6, 0.0f); // Absolute Position
  position[2] = 0.25f;
  float fractionMaxSpeed = 0.2f;
  int axisMask           = 63;
  motion.setPositions(chainName, space, position, fractionMaxSpeed, axisMask);
  qi::os::sleep(2.0f);

  return 0;
}

almotion_setPositions.py

#! /usr/bin/env python
# -*- encoding: UTF-8 -*-

"""Example: Use setPositions Method"""

import qi
import argparse
import sys
import time
import motion


def main(session):
    """
    This example uses the setPositions method.
    """
    # Get the services ALMotion & ALRobotPosture.

    motion_service  = session.service("ALMotion")
    posture_service = session.service("ALRobotPosture")

    # Wake up robot
    motion_service.wakeUp()

    # Send robot to Pose Init
    posture_service.goToPosture("StandInit", 0.5)

    # Example showing how to set LArm Position, using a fraction of max speed
    chainName = "LArm"
    frame     = motion.FRAME_TORSO
    useSensor = False

    # Get the current position of the chainName in the same frame
    current = motion_service.getPosition(chainName, frame, useSensor)

    target = [
        current[0] + 0.05,
        current[1] + 0.05,
        current[2] + 0.05,
        current[3] + 0.0,
        current[4] + 0.0,
        current[5] + 0.0]

    fractionMaxSpeed = 0.5
    axisMask         = 7 # just control position

    motion_service.setPositions(chainName, frame, target, fractionMaxSpeed, axisMask)

    time.sleep(1.0)

    # Example showing how to set Torso Position, using a fraction of max speed
    chainName        = "Torso"
    frame            = motion.FRAME_ROBOT
    position         = [0.0, 0.0, 0.25, 0.0, 0.0, 0.0] # Absolute Position
    fractionMaxSpeed = 0.2
    axisMask         = 63
    motion_service.setPositions(chainName, frame, position, fractionMaxSpeed, axisMask)

    time.sleep(4.0)

    # Go to rest position
    motion_service.rest()


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--ip", type=str, default="127.0.0.1",
                        help="Robot IP address. On robot or Local Naoqi: use '127.0.0.1'.")
    parser.add_argument("--port", type=int, default=9559,
                        help="Naoqi port number")

    args = parser.parse_args()
    session = qi.Session()
    try:
        session.connect("tcp://" + args.ip + ":" + str(args.port))
    except RuntimeError:
        print ("Can't connect to Naoqi at ip \"" + args.ip + "\" on port " + str(args.port) +".\n"
               "Please check your script arguments. Run with -h option for help.")
        sys.exit(1)
    main(session)
void ALMotionProxy::setPosition(const std::string& effectorName, const int& frame, const std::vector<float>& position, const float& fractionMaxSpeed, const int& axisMask)

Deprecated since version 1.16: use ALMotionProxy::setPositions instead.

NAO only

Moves an end-effector to the given position and orientation. This is a non-blocking call.

Parameters:
  • effectorName – Name of the effector. Could be: “Torso” or a chain name. Use ALMotionProxy::getBodyNames for the list of chains supported on your robot.
  • frame – Task frame {FRAME_TORSO = 0, FRAME_WORLD = 1, FRAME_ROBOT = 2}.
  • position – 6D position array (x, y, z, wx, wy, wz) in meters and radians.
  • fractionMaxSpeed – The fraction of maximum speed to use.
  • axisMask – Axis mask. Bit set to 1 for each axis that you wish to control, e.g. 7 for position only, 56 for rotation only, 63 for both.
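
A minimal, hypothetical sketch of this deprecated call (not an official sample), assuming a connected qi session; setPositions accepts the same arguments here, so the migration is a one-line change:

import motion

def raise_larm(session, dz=0.04):
    """Hypothetical helper: raise the LArm end-effector by dz meters."""
    motion_service = session.service("ALMotion")
    frame   = motion.FRAME_TORSO
    target  = list(motion_service.getPosition("LArm", frame, False))
    target[2] += dz                  # z is index 2 of (x, y, z, wx, wy, wz)
    # Deprecated since 1.16:
    motion_service.setPosition("LArm", frame, target, 0.3, 7)
    # Preferred equivalent:
    # motion_service.setPositions("LArm", frame, target, 0.3, 7)
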
void ALMotionProxy::changePosition(const std::string& effectorName, const int& frame, const std::vector<float>& positionChange, const float& fractionMaxSpeed, const int& axisMask)

Deprecated since version 1.16: use ALMotionProxy::setPositions instead.

NAO only

Creates a move of an end-effector in Cartesian space. This is a non-blocking call.

Parameters:
  • effectorName – Name of the effector. Could be: “Torso” or a chain name. Use ALMotionProxy::getBodyNames for the list of chains supported on your robot.
  • frame – Task frame {FRAME_TORSO = 0, FRAME_WORLD = 1, FRAME_ROBOT = 2}.
  • positionChange – 6D position change array (xd, yd, zd, wxd, wyd, wzd) in meters and radians.
  • fractionMaxSpeed – The fraction of maximum speed to use.
  • axisMask – Axis mask. Bit set to 1 for each axis that you wish to control, e.g. 7 for position only, 56 for rotation only, 63 for both.
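
A minimal, hypothetical sketch of this deprecated relative move (not an official sample), assuming a connected qi session; the same effect can be obtained with setPositions by adding the offset to the result of getPosition, as the setPositions examples below do:

import motion

def push_torso_forward(session, dx=0.02):
    """Hypothetical helper: shift the torso dx meters forward (relative move)."""
    motion_service = session.service("ALMotion")
    frame          = motion.FRAME_ROBOT
    positionChange = [dx, 0.0, 0.0, 0.0, 0.0, 0.0]  # (xd, yd, zd, wxd, wyd, wzd)
    # Deprecated since 1.16; prefer setPositions with an absolute target.
    motion_service.changePosition("Torso", frame, positionChange, 0.2, 7)
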
std::vector<float> ALMotionProxy::getPosition(const std::string& name, const int& frame, const bool& useSensorValues)

Gets a Position6D relative to the given frame. Axis definition: the x axis is positive toward the robot’s front, the y axis from right to left, and the z axis is vertical. The angle convention of Position6D is Rot_z(wz).Rot_y(wy).Rot_x(wx).

Parameters:
  • name – Name of the item. Could be any joint, chain, or sensor (use ALMotionProxy::getSensorNames for the list of sensors supported on your robot).
  • frame – Task frame {FRAME_TORSO = 0, FRAME_WORLD = 1, FRAME_ROBOT = 2}.
  • useSensorValues – If true, the sensor values will be used to determine the position.
Returns:

Vector containing the Position6D using meters and radians (x, y, z, wx, wy, wz)
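
As a reference for the angle convention, here is a small self-contained sketch (a hypothetical helper, no NAOqi dependency) that rebuilds the 3x3 rotation matrix from the returned (wx, wy, wz) angles using Rot_z(wz).Rot_y(wy).Rot_x(wx):

import math

def rotation_from_wx_wy_wz(wx, wy, wz):
    """Rebuild the 3x3 rotation matrix from (wx, wy, wz) using the
    Position6D convention Rot_z(wz) * Rot_y(wy) * Rot_x(wx)."""
    cx, sx = math.cos(wx), math.sin(wx)
    cy, sy = math.cos(wy), math.sin(wy)
    cz, sz = math.cos(wz), math.sin(wz)
    rot_x = [[1, 0, 0], [0, cx, -sx], [0, sx, cx]]
    rot_y = [[cy, 0, sy], [0, 1, 0], [-sy, 0, cy]]
    rot_z = [[cz, -sz, 0], [sz, cz, 0], [0, 0, 1]]
    def matmul(a, b):
        return [[sum(a[i][k] * b[k][j] for k in range(3)) for j in range(3)]
                for i in range(3)]
    return matmul(rot_z, matmul(rot_y, rot_x))
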

almotion_getposition.cpp

#include <iostream>
#include <alproxies/almotionproxy.h>

int main(int argc, char **argv)
{
  std::string robotIp = "127.0.0.1";

  if (argc < 2) {
    std::cerr << "Usage: almotion_getposition robotIp "
              << "(optional default \"127.0.0.1\")."<< std::endl;
  }
  else {
    robotIp = argv[1];
  }

  AL::ALMotionProxy motion(robotIp);

  // Example showing how to get the position of the top camera
  std::string name = "CameraTop";
  int space = 1; // FRAME_WORLD
  bool useSensorValues = true;
  std::vector<float> result = motion.getPosition(name, space, useSensorValues);
  std::cout << name << ":" << std::endl;
  std::cout << "Position (x, y, z): " << result.at(0) << ", " << result.at(1)
            << ", " << result.at(2) << std::endl;
  std::cout << "Rotation (x, y, z): " << result.at(3) << ", " << result.at(4)
            << ", " << result.at(5) << std::endl;

  return 0;
}

almotion_getPosition.py

#! /usr/bin/env python
# -*- encoding: UTF-8 -*-

"""Example: Use getPosition Method"""

import qi
import argparse
import sys
import motion


def main(session):
    """
    This example uses the getPosition method.
    """
    # Get the service ALMotion.

    motion_service  = session.service("ALMotion")

    # Example showing how to get the position of the top camera
    name            = "CameraTop"
    frame           = motion.FRAME_WORLD
    useSensorValues = True
    result          = motion_service.getPosition(name, frame, useSensorValues)
    print "Position of", name, " in World is:"
    print result


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--ip", type=str, default="127.0.0.1",
                        help="Robot IP address. On robot or Local Naoqi: use '127.0.0.1'.")
    parser.add_argument("--port", type=int, default=9559,
                        help="Naoqi port number")

    args = parser.parse_args()
    session = qi.Session()
    try:
        session.connect("tcp://" + args.ip + ":" + str(args.port))
    except RuntimeError:
        print ("Can't connect to Naoqi at ip \"" + args.ip + "\" on port " + str(args.port) +".\n"
               "Please check your script arguments. Run with -h option for help.")
        sys.exit(1)
    main(session)
void ALMotionProxy::transformInterpolation(const std::string& effectorName, const int& frame, const AL::ALValue& path, const int& axisMask, const AL::ALValue& duration, const bool& isAbsolute)

Deprecated since version 1.16: use ALMotionProxy::transformInterpolations instead.

NAO only

Moves an end-effector to the given position and orientation over time using homogeneous transforms to describe the positions or changes. This is a blocking call.

Parameters:
  • effectorName – Name of the effector. Could be: “Torso” or a chain name. Use ALMotionProxy::getBodyNames for the list of chains supported on your robot.
  • frame – Task frame {FRAME_TORSO = 0, FRAME_WORLD = 1, FRAME_ROBOT = 2}.
  • path – Vector of Transform arrays.
  • axisMask – Axis mask. Bit set to 1 for each axis that you wish to control, e.g. 7 for position only, 56 for rotation only, 63 for both.
  • duration – Vector of times in seconds corresponding to the path points.
  • isAbsolute – If true, the movement is absolute; otherwise it is relative.
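
A minimal, hypothetical sketch of this deprecated call (not an official sample), assuming a connected qi session and that almath.Transform() builds an identity transform, as its C++ counterpart does:

import almath
import motion

def lower_larm(session, dz=0.03):
    """Hypothetical helper: lower the LArm end-effector by dz meters (relative)."""
    motion_service = session.service("ALMotion")
    frame      = motion.FRAME_TORSO
    deltaTf    = almath.Transform()   # identity rotation, zero translation
    deltaTf.r3_c4 = -dz               # -dz along the z axis
    path       = [list(deltaTf.toVector())]
    axisMask   = 7                    # position only
    duration   = [2.0]                # seconds
    isAbsolute = False
    # Deprecated since 1.16; prefer ALMotionProxy::transformInterpolations.
    motion_service.transformInterpolation("LArm", frame, path, axisMask,
                                          duration, isAbsolute)
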
void ALMotionProxy::transformInterpolations(const AL::ALValue& effectorNames, const AL::ALValue& frames, const AL::ALValue& paths, const AL::ALValue& axisMasks, const AL::ALValue& relativeTimes)

NAO only

There are two overloads of this function:

Moves end-effectors to the given transforms over time. This is a blocking call.

Parameters:
  • effectorNames – Vector of effector names. Could be: “Torso” or a chain name. Use ALMotionProxy::getBodyNames for the list of chains supported on your robot.
  • frames – Vector of task frames {FRAME_TORSO = 0, FRAME_WORLD = 1, FRAME_ROBOT = 2}.
  • paths – Vector of absolute Transform arrays.
  • axisMasks – Vector of axis masks. Bit set to 1 for each axis that you wish to control, e.g. 7 for position only, 56 for rotation only, 63 for both.
  • relativeTimes – Vector of times in seconds corresponding to the path points.

almotion_transformInterpolations.py

#! /usr/bin/env python
# -*- encoding: UTF-8 -*-

"""Example: Use transphormInterpolation Method"""

import qi
import argparse
import sys
import almath
import motion


def main(session):
    """
    This example uses the transformInterpolations method.
    """
    # Get the services ALMotion & ALRobotPosture.

    motion_service  = session.service("ALMotion")
    posture_service = session.service("ALRobotPosture")

    # Wake up robot
    motion_service.wakeUp()

    # Send robot to Pose Init
    posture_service.goToPosture("StandInit", 0.5)

    # Example showing how to use transformInterpolations
    frame = motion.FRAME_ROBOT
    useSensorValues = False

    # Motion of Arms with block process
    effectorList = ["LArm", "RArm"]
    axisMaskList = [motion.AXIS_MASK_VEL, motion.AXIS_MASK_VEL]
    timeList     = [[1.0], [1.0]]         # seconds

    dy = 0.04
    pathList = []
    targetLArmTf = almath.Transform(motion_service.getTransform("LArm", frame, useSensorValues))
    targetLArmTf.r2_c4 -= dy
    pathList.append(list(targetLArmTf.toVector()))

    targetRArmTf = almath.Transform(motion_service.getTransform("RArm", frame, useSensorValues))
    targetRArmTf.r2_c4 += dy
    pathList.append(list(targetRArmTf.toVector()))

    motion_service.transformInterpolations(effectorList, frame, pathList,
                                 axisMaskList, timeList)

    # Motion of Arms and Torso with block process
    effectorList = ["LArm", "RArm", "Torso"]
    axisMaskList = [motion.AXIS_MASK_VEL,
                    motion.AXIS_MASK_VEL,
                    motion.AXIS_MASK_ALL]
    timeList     = [[4.0],
                    [4.0],
                    [1.0, 2.0, 3.0, 4.0]] # seconds

    dx = 0.03 # translation axis X (meters)
    dy = 0.05 # translation axis Y (meters)

    pathList = []

    targetLArmTf = almath.Transform(motion_service.getTransform("LArm", frame, useSensorValues))
    pathList.append(list(targetLArmTf.toVector()))

    targetRArmTf = almath.Transform(motion_service.getTransform("RArm", frame, useSensorValues))
    pathList.append(list(targetRArmTf.toVector()))

    pathTorsoList = []
    # point 1
    initTorsoTf = almath.Transform(motion_service.getTransform("Torso", frame, useSensorValues))
    targetTorsoTf = initTorsoTf
    targetTorsoTf.r2_c4 += dy
    pathTorsoList.append(list(targetTorsoTf.toVector()))

    # point 2
    initTorsoTf = almath.Transform(motion_service.getTransform("Torso", frame, useSensorValues))
    targetTorsoTf = initTorsoTf
    targetTorsoTf.r1_c4 += -dx
    pathTorsoList.append(list(targetTorsoTf.toVector()))

    # point 3
    initTorsoTf = almath.Transform(motion_service.getTransform("Torso", frame, useSensorValues))
    targetTorsoTf = initTorsoTf
    targetTorsoTf.r2_c4 += -dy
    pathTorsoList.append(list(targetTorsoTf.toVector()))

    # point 4
    initTorsoTf = almath.Transform(motion_service.getTransform("Torso", frame, useSensorValues))
    pathTorsoList.append(list(initTorsoTf.toVector()))

    pathList.append(pathTorsoList)

    motion_service.transformInterpolations(effectorList, frame, pathList,
                                  axisMaskList, timeList)

    # Go to rest position
    motion_service.rest()


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--ip", type=str, default="127.0.0.1",
                        help="Robot IP address. On robot or Local Naoqi: use '127.0.0.1'.")
    parser.add_argument("--port", type=int, default=9559,
                        help="Naoqi port number")

    args = parser.parse_args()
    session = qi.Session()
    try:
        session.connect("tcp://" + args.ip + ":" + str(args.port))
    except RuntimeError:
        print ("Can't connect to Naoqi at ip \"" + args.ip + "\" on port " + str(args.port) +".\n"
               "Please check your script arguments. Run with -h option for help.")
        sys.exit(1)
    main(session)
void ALMotionProxy::transformInterpolations(const std::vector<std::string>& effectorNames, const int& frame, const AL::ALValue& paths, const AL::ALValue& axisMasks, const AL::ALValue& relativeTimes, const bool& isAbsolute)

Deprecated since version 1.16: use ALMotionProxy::transformInterpolations instead.

NAO only

Moves end-effectors to the given transforms over time. This is a blocking call.

Parameters:
  • effectorNames – Vector of effector names. Could be: “Torso” or a chain name. Use ALMotionProxy::getBodyNames for the list of chains supported on your robot.
  • frame – Task frame {FRAME_TORSO = 0, FRAME_WORLD = 1, FRAME_ROBOT = 2}.
  • paths – Vector of Transform arrays.
  • axisMasks – Vector of axis masks. Bit set to 1 for each axis that you wish to control, e.g. 7 for position only, 56 for rotation only, 63 for both.
  • relativeTimes – Vector of times in seconds corresponding to the path points.
  • isAbsolute – If true, the movement is absolute; otherwise it is relative.
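
A minimal, hypothetical sketch of this deprecated overload (not an official sample), assuming a connected qi session and that almath.Transform() builds an identity transform:

import almath
import motion

def shift_torso_forward(session, dx=0.02):
    """Hypothetical helper: shift the torso dx meters forward (relative move)."""
    motion_service = session.service("ALMotion")
    frame         = motion.FRAME_ROBOT
    deltaTf       = almath.Transform()   # identity rotation, zero translation
    deltaTf.r1_c4 = dx                   # +dx along the x axis
    paths         = [[list(deltaTf.toVector())]]   # one path point for "Torso"
    axisMasks     = [motion.AXIS_MASK_ALL]
    relativeTimes = [[2.0]]              # seconds
    isAbsolute    = False
    # Deprecated since 1.16; prefer the non-deprecated overload above.
    motion_service.transformInterpolations(["Torso"], frame, paths,
                                           axisMasks, relativeTimes, isAbsolute)
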
void ALMotionProxy::setTransforms(const std::string& effectorName, const int& frame, const std::vector<float>& transform, const float& fractionMaxSpeed, const int& axisMask)

NAO only

Moves an end-effector to the given position and orientation transform. This is a non-blocking call.

Parameters:
  • effectorName – Name of the effector. Could be: “Torso” or a chain name. Use ALMotionProxy::getBodyNames for the list of chains supported on your robot.
  • frame – Task frame {FRAME_TORSO = 0, FRAME_WORLD = 1, FRAME_ROBOT = 2}.
  • transform – Transform array.
  • fractionMaxSpeed – The fraction of maximum speed to use.
  • axisMask – Axis mask. Bit set to 1 for each axis that you wish to control, e.g. 7 for position only, 56 for rotation only, 63 for both.

almotion_settransforms.cpp

#include <iostream>
#include <alproxies/almotionproxy.h>
#include <alproxies/alrobotpostureproxy.h>
#include <qi/os.hpp>

int main(int argc, char **argv)
{
  std::string robotIp = "127.0.0.1";

  if (argc < 2) {
    std::cerr << "Usage: almotion_settransforms robotIp "
              << "(optional default \"127.0.0.1\")."<< std::endl;
  }
  else {
    robotIp = argv[1];
  }

  AL::ALMotionProxy motion(robotIp);
  AL::ALRobotPostureProxy robotPosture(robotIp);

  robotPosture.goToPosture("StandInit", 0.5f);

  // Example showing how to set Torso Transform, using a fraction of max speed
  std::string chainName  = "Torso";
  int space              = 2;            // FRAME_ROBOT
  std::vector<float> transform(12, 0.0f); // Absolute Position
  transform[0]  = 1.0f;  // 1.0f, 0.0f, 0.0f, 0.00f
  transform[5]  = 1.0f;  // 0.0f, 1.0f, 0.0f, 0.00f
  transform[10] = 1.0f;  // 0.0f, 0.0f, 1.0f, 0.25f
  transform[11] = 0.25f;
  float fractionMaxSpeed = 0.2f;
  int axisMask           = 63;
  motion.setTransforms(chainName, space, transform, fractionMaxSpeed, axisMask);
  qi::os::sleep(3.0f);

  return 0;
}

almotion_setTransforms.py

#! /usr/bin/env python
# -*- encoding: UTF-8 -*-

"""Example: Use setTransforms Method"""

import qi
import argparse
import sys
import time
import motion


def main(session):
    """
    This example uses the setTransforms method.
    """
    # Get the services ALMotion & ALRobotPosture.

    motion_service  = session.service("ALMotion")
    posture_service = session.service("ALRobotPosture")

    # Wake up robot
    motion_service.wakeUp()

    # Send robot to Pose Init
    posture_service.goToPosture("StandInit", 0.5)

    # Example showing how to set Torso Transform, using a fraction of max speed
    chainName        = "Torso"
    frame            = motion.FRAME_ROBOT
    transform        = [1.0, 0.0, 0.0, 0.00,
                        0.0, 1.0, 0.0, 0.00,
                        0.0, 0.0, 1.0, 0.25]
    fractionMaxSpeed = 0.2
    axisMask         = 63
    motion_service.setTransforms(chainName, frame, transform, fractionMaxSpeed, axisMask)

    time.sleep(4.0)

    # Go to rest position
    motion_service.rest()


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--ip", type=str, default="127.0.0.1",
                        help="Robot IP address. On robot or Local Naoqi: use '127.0.0.1'.")
    parser.add_argument("--port", type=int, default=9559,
                        help="Naoqi port number")

    args = parser.parse_args()
    session = qi.Session()
    try:
        session.connect("tcp://" + args.ip + ":" + str(args.port))
    except RuntimeError:
        print ("Can't connect to Naoqi at ip \"" + args.ip + "\" on port " + str(args.port) +".\n"
               "Please check your script arguments. Run with -h option for help.")
        sys.exit(1)
    main(session)
void ALMotionProxy::setTransform(const std::string& effectorName, const int& frame, const std::vector<float>& transform, const float& fractionMaxSpeed, const int& axisMask)

Deprecated since version 1.16: use ALMotionProxy::setTransforms instead.

NAO only

Moves an end-effector to the given position and orientation transform. This is a non-blocking call.

Parameters:
  • effectorName – Name of the effector. Could be: “Torso” or a chain name. Use ALMotionProxy::getBodyNames for the list of chains supported on your robot.
  • frame – Task frame {FRAME_TORSO = 0, FRAME_WORLD = 1, FRAME_ROBOT = 2}.
  • transform – Transform array.
  • fractionMaxSpeed – The fraction of maximum speed to use.
  • axisMask – Axis mask. Bit set to 1 for each axis that you wish to control, e.g. 7 for position only, 56 for rotation only, 63 for both.
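
A minimal, hypothetical sketch of this deprecated call (not an official sample), assuming a connected qi session; setTransforms accepts the same arguments here, so the migration is a one-line change:

import motion

def place_torso(session):
    """Hypothetical helper: place the torso 0.25 m above the FRAME_ROBOT origin."""
    motion_service = session.service("ALMotion")
    frame     = motion.FRAME_ROBOT
    transform = [1.0, 0.0, 0.0, 0.00,   # identity rotation,
                 0.0, 1.0, 0.0, 0.00,   # absolute translation (0, 0, 0.25)
                 0.0, 0.0, 1.0, 0.25]
    # Deprecated since 1.16:
    motion_service.setTransform("Torso", frame, transform, 0.2, 63)
    # Preferred equivalent:
    # motion_service.setTransforms("Torso", frame, transform, 0.2, 63)
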
void ALMotionProxy::changeTransform(const std::string& effectorName, const int& frame, const std::vector<float>& transform, const float& fractionMaxSpeed, const int& axisMask)

Deprecated since version 1.16: use ALMotionProxy::setTransforms instead.

NAO only

Moves an end-effector to the given position and orientation transform. This is a non-blocking call.

Parameters:
  • effectorName – Name of the effector. Could be: “Torso” or a chain name. Use ALMotionProxy::getBodyNames for the list of chains supported on your robot.
  • frame – Task frame {FRAME_TORSO = 0, FRAME_WORLD = 1, FRAME_ROBOT = 2}.
  • transform – Transform array describing the change to apply.
  • fractionMaxSpeed – The fraction of maximum speed to use.
  • axisMask – Axis mask. Bit set to 1 for each axis that you wish to control, e.g. 7 for position only, 56 for rotation only, 63 for both.
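
A minimal, hypothetical sketch of this deprecated relative move (not an official sample), assuming a connected qi session; here the transform encodes a change rather than an absolute target:

import motion

def shift_torso_left(session, dy=0.02):
    """Hypothetical helper: shift the torso dy meters to the left (relative move)."""
    motion_service = session.service("ALMotion")
    frame     = motion.FRAME_ROBOT
    transform = [1.0, 0.0, 0.0, 0.0,   # identity rotation,
                 0.0, 1.0, 0.0, dy,    # translation change (0, dy, 0)
                 0.0, 0.0, 1.0, 0.0]
    # Deprecated since 1.16; prefer setTransforms with an absolute target.
    motion_service.changeTransform("Torso", frame, transform, 0.2, 7)
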
std::vector<float> ALMotionProxy::getTransform(const std::string& name, const int& frame, const bool& useSensorValues)

Gets a homogeneous transform relative to the given frame. Axis definition: the x axis is positive toward the robot’s front, the y axis from right to left, and the z axis is vertical.

Parameters:
  • name – Name of the item. Could be any joint, chain, or sensor (use ALMotionProxy::getSensorNames for the list of sensors supported on your robot).
  • frame – Task frame {FRAME_TORSO = 0, FRAME_WORLD = 1, FRAME_ROBOT = 2}.
  • useSensorValues – If true, the sensor values will be used to determine the position.
Returns:

Vector of 16 floats corresponding to the values of the matrix, line by line.

almotion_gettransform.cpp

#include <iostream>
#include <alproxies/almotionproxy.h>

int main(int argc, char **argv)
{
  std::string robotIp = "127.0.0.1";

  if (argc < 2) {
    std::cerr << "Usage: almotion_gettransform robotIp "
              << "(optional default \"127.0.0.1\")."<< std::endl;
  }
  else {
    robotIp = argv[1];
  }

  AL::ALMotionProxy motion(robotIp);

  // Example showing how to get the end of the right arm as a transform
  // represented in torso space.
  std::string name  = "RArm";
  int space  = 0; // FRAME_TORSO
  bool useSensorValues  = false;
  std::vector<float> result = motion.getTransform(name, space, useSensorValues);
  // R R R x
  // R R R y
  // R R R z
  // 0 0 0 1
  std::cout << "Transform of RArm" << std::endl;
  std::cout << result.at(0) << " " << result.at(1) << " " << result.at(2) << " " << result.at(3) << std::endl;
  std::cout << result.at(4) << " " << result.at(5) << " " << result.at(6) << " " << result.at(7) << std::endl;
  std::cout << result.at(8) << " " << result.at(9) << " " << result.at(10) << " " << result.at(11) << std::endl;

  return 0;
}

almotion_getTransform.py

#! /usr/bin/env python
# -*- encoding: UTF-8 -*-

"""Example: Use getTransform Method"""

import qi
import argparse
import sys
import motion


def main(session):
    """
    This example uses the getTransform method.
    """
    # Get the services ALMotion & ALRobotPosture.

    motion_service  = session.service("ALMotion")

    # Example showing how to get the end of the right arm as a transform
    # represented in torso space. The result is a 4 by 4 matrix composed
    # of a 3*3 rotation matrix and a column vector of positions.
    name  = 'RArm'
    frame  = motion.FRAME_TORSO
    useSensorValues  = True
    result = motion_service.getTransform(name, frame, useSensorValues)
    for i in range(0, 4):
        for j in range(0, 4):
            print result[4*i + j],
        print ''


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--ip", type=str, default="127.0.0.1",
                        help="Robot IP address. On robot or Local Naoqi: use '127.0.0.1'.")
    parser.add_argument("--port", type=int, default=9559,
                        help="Naoqi port number")

    args = parser.parse_args()
    session = qi.Session()
    try:
        session.connect("tcp://" + args.ip + ":" + str(args.port))
    except RuntimeError:
        print ("Can't connect to Naoqi at ip \"" + args.ip + "\" on port " + str(args.port) +".\n"
               "Please check your script arguments. Run with -h option for help.")
        sys.exit(1)
    main(session)