@inproceedings{7041449,
  author    = {Rubert, C. and Le{\'o}n, B. and Morales, A.},
  title     = {Grasp Quality Metrics for Robot Hands Benchmarking},
  booktitle = {2014 {IEEE-RAS} International Conference on Humanoid Robots},
  year      = {2014},
  month     = nov,
  pages     = {761--766},
  keywords  = {Computational modeling, grasp configurations, grasp quality metrics, Grasping, grasping capabilities, Humanoid robots, Joints, manipulators, Measurement, robot hand designs, robot hands benchmarking, Robots, Standards, Synchronous digital hierarchy},
  issn      = {2164-0572},
  doi       = {10.1109/HUMANOIDS.2014.7041449},
}

@article{63,
  author   = {Chinellato, Eris and Grzyb, Beata J. and del Pobil, Angel P.},
  title    = {Pose Estimation Through Cue Integration: A Neuroscience-Inspired Approach},
  journal  = {{IEEE} Transactions on Systems, Man, and Cybernetics, Part B (Cybernetics)},
  volume   = {42},
  year     = {2012},
  pages    = {530--538},
  abstract = {The aim of this paper is to improve the skills of robotic systems in their interaction with nearby objects. The basic idea is to enhance visual estimation of objects in the world through the merging of different visual estimators of the same stimuli. A neuroscience-inspired model of stereoptic and perspective orientation estimators, merged according to different criteria, is implemented on a robotic setup and tested in different conditions. Experimental results suggest that the integration of multiple monocular and binocular cues can make robot sensory systems more reliable and versatile. The same results, compared with simulations and data from human studies, show that the model is able to reproduce some well-recognized neuropsychological effects.},
  keywords = {binocular cue integration, Biological system modeling, Cameras, Computational modeling, Computer Simulation, Computer-Assisted, Cybernetics, Depth Perception, Estimation, Grasping, grippers, Humans, Image Processing, Intelligent robots, Models, monocular cue integration, Neurological, neuropsychological effects, neuroscience-inspired model, object estimation, perspective orientation estimator, pose estimation, Reliability, Reproducibility of Results, robot sensory systems, robot vision, robot vision systems, Robotics, Robots, stereo image processing, stereo vision, stereoptic orientation estimator, Task Performance and Analysis, visual estimation, visual perception, Visualization},
  issn     = {1083-4419},
  doi      = {10.1109/TSMCB.2011.2168952},
}

@inproceedings{32,
  author    = {Chinellato, Eris and Felip, Javier and Grzyb, Beata J. and Morales, Antonio and del Pobil, Angel P.},
  title     = {Hierarchical Object Recognition Inspired by Primate Brain Mechanisms},
  booktitle = {2011 {IEEE} Workshop on Computational Intelligence for Visual Intelligence ({CIVI})},
  year      = {2011},
  keywords  = {brain, Estimation, Grasping, hierarchical object recognition, Image color analysis, multimodal integration, mutual projection, neurophysiology, neuroscience hypothesis, object recognition, object weight estimation, primate brain mechanism, real robot, robot vision, Robots, Shape, visual processing, Visualization, visuomotor behavior},
  doi       = {10.1109/CIVI.2011.5955017},
}

@inproceedings{34,
  author    = {Bohg, J. and Johnson-Roberson, M. and Le{\'o}n, Beatriz and Felip, Javier and Gratal, X. and Bergstrom, N. and Kragic, Danica and Morales, Antonio},
  title     = {Mind the Gap -- Robotic Grasping under Incomplete Observation},
  booktitle = {2011 {IEEE} International Conference on Robotics and Automation ({ICRA})},
  year      = {2011},
  keywords  = {Approximation methods, collision-free movements, gap robotic grasping, Grasping, Image reconstruction, incomplete observation, manipulator kinematics, mesh generation, mesh reconstruction, object shape prediction, Planning, Robots, Shape, Surface reconstruction},
  doi       = {10.1109/ICRA.2011.5980354},
}

@inproceedings{78,
  author    = {Morales, Antonio and Chinellato, Eris and Fagg, A. H. and del Pobil, Angel P.},
  title     = {An Active Learning Approach for Assessing Robot Grasp Reliability},
  booktitle = {2004 {IEEE/RSJ} International Conference on Intelligent Robots and Systems ({IROS} 2004)},
  year      = {2004},
  abstract  = {Learning techniques in robotic grasping applications have usually been concerned with the way a hand approaches to an object, or with improving the motor control of manipulation actions. We present an active learning approach devised to face the problem of visually-guided grasp selection. We want to choose the best hand configuration for grasping a particular object using only visual information. Experimental data from real grasping actions is used, and the experience gathering process is driven by an on-line estimation of the reliability assessment capabilities of the system. The goal is to improve the selection skills of the grasping system, minimizing at the same time the cost and duration of the learning process.},
  keywords  = {active learning approach, Costs, Grasping, Haptic interfaces, Intelligent robots, Laboratories, learning (artificial intelligence), manipulators, motor control, Motor drives, online estimation, Reliability, reliability assessment capabilities, robot grasp reliability, Robot sensing systems, Torso, Training data, Uncertainty, visually-guided grasp selection},
  doi       = {10.1109/IROS.2004.1389399},
}

@inproceedings{77,
  author    = {Morales, Antonio and Chinellato, Eris and Fagg, A. H. and del Pobil, Angel P.},
  title     = {Experimental Prediction of the Performance of Grasp Tasks from Visual Features},
  booktitle = {2003 {IEEE/RSJ} International Conference on Intelligent Robots and Systems ({IROS} 2003)},
  year      = {2003},
  abstract  = {This paper deals with visually guided grasping of unmodeled objects for robots which exhibit an adaptive behavior based on their previous experiences. Nine features are proposed to characterize three-finger grasps. They are computed from the object image and the kinematics of the hand. Real experiments on a humanoid robot with a Barrett hand are carried out to provide experimental data. This data is employed by a classification strategy, based on the k-nearest neighbour estimation rule, to predict the reliability of a grasp configuration in terms of five different performance classes. Prediction results suggest the methodology is adequate.},
  keywords  = {adaptive behavior, Barrett hand, dexterous manipulators, estimation rule, feature extraction, Geometry, grasp configuration, Grasping, hand kinematics, humanoid robot, Humans, Image reconstruction, Intelligent robots, Kinematics, Laboratories, manipulator kinematics, object image, performance prediction, prediction theory, Reliability, Robot sensing systems, robot vision, Robustness, Service robots, three finger grasps, unmodeled objects, visual features, visually guided grasping},
  doi       = {10.1109/IROS.2003.1249685},
}