[Apologies for multiple copies due to cross-posting. Please forward to colleagues who might be interested]

Dear Colleagues,

We would like to cordially invite you to submit a paper to the IJCNN 2023 Special Session on Machine Learning and Deep Learning Methods applied to Vision and Robotics (MLDLMVR).

Gold Coast, Australia, 18-23 June 2023 – https://2023.ijcnn.org/

Aims:

Over the last decades there has been increasing interest in using machine learning and, in the last few years, deep learning methods, combined with other vision techniques, to create autonomous systems that solve vision problems in different fields. This special session is designed to enable researchers and developers to publish original, innovative and state-of-the-art algorithms and architectures for real-time applications in the areas of computer vision, image processing, biometrics, virtual and augmented reality, neural networks, intelligent interfaces and biomimetic object-vision recognition.

This special session provides a platform for academics, developers, and industry-related researchers belonging to the vast communities of *Neural Networks*, *Computational Intelligence*, *Machine Learning*, *Deep Learning*, *Biometrics*, *Vision Systems*, and *Robotics* to discuss, share experience and explore traditional and new areas of computer vision and machine and deep learning, combined to solve a range of problems.
The objective of the workshop is to bring the growing international community of researchers working on the application of Machine Learning and Deep Learning Methods in Vision and Robotics into a fruitful discussion on the evolution and the benefits of this technology to society.

The methods and tools applied to vision and robotics include, but are not limited to, the following:

- Computational Intelligence methods
- Machine Learning methods
- Self-adaptation, self-organisation and self-supervised learning
- Robust computer vision algorithms (operation under variable conditions, object tracking, behaviour analysis and learning, scene segmentation, ...)
- Extraction of Biometric Features (fingerprint, iris, face, voice, palm, gait)
- Registration Methods
- Convolutional Neural Networks (CNNs)
- Recurrent Neural Networks (RNNs)
- Deep Reinforcement Learning (DRL)
- Generative Adversarial Networks (GANs)
- Predictive Learning
- Active, Incremental and Online Learning
- Hardware implementation and algorithm acceleration (GPUs, FPGAs, ...)

The fields of application include, but are not limited to, the following:
- Video and Image Processing
- Video Tracking
- 3D Scene Reconstruction
- 6D Object Detection
- Object Grasping/Manipulation
- 3D Tracking in Virtual Reality Environments
- 3D Volume Visualization
- Intelligent Interfaces (user-friendly man-machine interfaces)
- Multi-camera and RGB-D Camera Systems
- Multi-modal Human Pose Recovery and Behaviour Analysis
- Human Body Reconstruction
- Gesture and Posture Analysis and Recognition
- Biometric Identification and Recognition
- Extraction of Biometric Features (fingerprint, iris, face, voice, palm, gait)
- Surveillance Systems
- Autonomous and Social Robots
- Robotic Vision
- Synthetic Data Generation
- Sim2Real
- Industry 4.0
- IoT and Cyber-physical Systems

Important dates:

Paper submission deadline: January 31, 2023
Paper acceptance notification: March 31, 2023
Conference: June 18-23, 2023

Submission Guidelines:

Please follow the regular submission guidelines of IJCNN 2023. Please notify the chairs of your submission by sending an email to jgarcia@dtic.ua.es.

Chairs:

José García-Rodríguez – University of Alicante (Spain)
Alexandra Psarrou – University of Westminster (UK)
Isabelle Guyon – U. Paris-Saclay (France) and ChaLearn (USA)
Andrew Lewis – Griffith University (Australia)

Contact:

Email: jgarcia@dtic.ua.es
Main conference webpage: https://2023.ijcnn.org/

Best regards,

Jose

Jose Garcia-Rodriguez, PhD
Full Professor in Computer Vision, Machine Learning & HPC
Vice-Dean for International Relations, Polytechnic University College
Director, PhD Program in Computer Science
Member of the ELLIS Society (European Laboratory for Learning and Intelligent Systems – https://ellis.eu/members)
UA Representative, AIDA (Artificial Intelligence Doctoral Academy – https://www.i-aida.org/)
Dpt. Computer Technology, University of Alicante. PO Box 99, 03080 Alicante (Spain)
tel: +34 965903400 ext. 2616  mobile: +34 610488989  fax: +34 965909643
email: jgarcia@dtic.ua.es  skype: jose.garcia.alicante  website: josegarcia.dtic.ua.es