@conference{24,
  title = {Implicit mapping of the peripersonal space of a humanoid robot},
  booktitle = {Computational Intelligence, Cognitive Algorithms, Mind, and Brain (CCMB), 2011 IEEE Symposium on},
  year = {2011},
  abstract = {

In this work, taking inspiration from primate visuomotor mechanisms, a humanoid robot builds a sensorimotor map of the environment that is configured and trained through gazing and reaching movements. The map is accessed and modified by two types of information, retinotopic (visual) and proprioceptive (eye and arm movements), and constitutes both knowledge of the environment and a sensorimotor code for performing movements and evaluating their outcome. By performing direct and inverse transformations between stereo vision, oculomotor, and joint-space representations, the robot learns to perform gazing and reaching movements, which are in turn employed to update its sensorimotor knowledge of the environment. Thus, the robot keeps learning during its normal behavior, interacting with the world and contextually updating its representation of it. This representation is never made explicit; rather, it constitutes a visuomotor awareness of space that emerges through the agent's interaction with its surroundings.

},
  keywords = {Head, humanoid robot, joint space representation, Joints, Neurons, oculomotor, peripersonal space, primate visuomotor mechanisms, proprioceptive information, retinotopic information, Robot kinematics, Robot sensing systems, robot vision, Robotics, sensorimotor code, sensorimotor knowledge, stereo image processing, stereo vision, Visualization, visuomotor awareness},
  doi = {10.1109/CCMB.2011.5952119},
  author = {Marco Antonelli and Eris Chinellato and Angel P. del Pobil}
}
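
The abstract describes an implicit map linking retinotopic input to proprioceptive (eye/arm) coordinates, updated online whenever a gaze or reach movement provides a new visual-proprioceptive pair. The entry gives no implementation details, so the following is only a minimal illustrative sketch, assuming a radial-basis-function coding of the visual input and an online delta-rule update; the class, parameter names, and dimensions are hypothetical and not taken from the paper.

```python
# Illustrative sketch (not the authors' code): an RBF map from a retinotopic
# input (e.g., left/right image coordinates plus disparity) to a proprioceptive
# output (joint angles), refined online after each executed movement.
import numpy as np


class SensorimotorMap:
    """RBF mapping from retinotopic coordinates to joint coordinates."""

    def __init__(self, n_centers=50, in_dim=4, out_dim=3, width=0.5, lr=0.1, seed=0):
        rng = np.random.default_rng(seed)
        self.centers = rng.uniform(-1.0, 1.0, size=(n_centers, in_dim))  # RBF centers
        self.width = width                                # Gaussian kernel width
        self.weights = np.zeros((n_centers, out_dim))     # linear readout weights
        self.lr = lr                                      # online learning rate

    def _activations(self, x):
        # Gaussian activation of each center for input x
        d2 = np.sum((self.centers - x) ** 2, axis=1)
        return np.exp(-d2 / (2.0 * self.width ** 2))

    def predict(self, x):
        # Visual input -> predicted joint configuration
        return self._activations(x) @ self.weights

    def update(self, x, y):
        # Delta-rule update after a movement: x = retinotopic coordinates of the
        # fixated/reached target, y = measured proprioceptive configuration.
        a = self._activations(x)
        error = y - a @ self.weights
        self.weights += self.lr * np.outer(a, error)
        return np.linalg.norm(error)


if __name__ == "__main__":
    # Toy usage: learn an arbitrary smooth visual-to-joint relation online.
    rng = np.random.default_rng(1)
    true_map = lambda x: np.array([np.sin(x[0]), x[1] * x[2], np.cos(x[3])])
    m = SensorimotorMap()
    for _ in range(2000):
        x = rng.uniform(-1.0, 1.0, size=4)   # simulated retinal/disparity input
        y = true_map(x)                      # simulated proprioceptive outcome
        err = m.update(x, y)
    print("final per-sample error:", round(err, 3))
```

In this toy version the map only learns one direction (visual to joint space); the paper's scheme also covers the direct transformation and uses the same structure both as spatial knowledge and as a motor code, which a sketch this small does not capture.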