{"id":173,"date":"2023-05-23T00:29:56","date_gmt":"2023-05-22T18:59:56","guid":{"rendered":"https:\/\/aiml.3it.in\/?page_id=173"},"modified":"2024-10-10T22:28:03","modified_gmt":"2024-10-10T16:58:03","slug":"keynote_details","status":"publish","type":"page","link":"https:\/\/www.aimlsystems.org\/2024\/keynote_details\/","title":{"rendered":"Keynote Speakers"},"content":{"rendered":"<p>[et_pb_section fb_built=&#8221;1&#8243; admin_label=&#8221;Header&#8221; _builder_version=&#8221;4.19.5&#8243; _module_preset=&#8221;default&#8221; background_color=&#8221;gcid-1bcf785a-50e1-437b-b09f-65567babc1de&#8221; background_image=&#8221;https:\/\/www.aimlsystems.org\/2023\/wp-content\/uploads\/2023\/05\/grid-bg-2.png&#8221; background_size=&#8221;initial&#8221; background_position=&#8221;bottom_center&#8221; background_repeat=&#8221;repeat&#8221; custom_padding=&#8221;||0px|||&#8221; collapsed=&#8221;on&#8221; global_colors_info=&#8221;{%22gcid-1bcf785a-50e1-437b-b09f-65567babc1de%22:%91%22background_color%22%93}&#8221;][et_pb_row column_structure=&#8221;2_3,1_3&#8243; _builder_version=&#8221;4.19.2&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_column type=&#8221;2_3&#8243; _builder_version=&#8221;4.19.2&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_text _builder_version=&#8221;4.23.4&#8243; _module_preset=&#8221;7f63b212-a10a-4d30-afa2-e478a747ca88&#8243; custom_margin=&#8221;||10px||false|false&#8221; header_2_font_size_phone=&#8221;33px&#8221; custom_css_free_form=&#8221;selector h2{color:white}&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<h2>Keynote Speakers<\/h2>\n<p>[\/et_pb_text][et_pb_code disabled_on=&#8221;on|on|on&#8221; _builder_version=&#8221;4.23.4&#8243; _module_preset=&#8221;default&#8221; disabled=&#8221;on&#8221; locked=&#8221;off&#8221; global_colors_info=&#8221;{}&#8221;]<!-- [et_pb_line_break_holder] --><!DOCTYPE html><!-- [et_pb_line_break_holder] --><html><!-- 
[et_pb_line_break_holder] --><script><!-- [et_pb_line_break_holder] -->window.onload = (event) =>{<!-- [et_pb_line_break_holder] -->document.getElementById('search').style.width = '350px';<!-- [et_pb_line_break_holder] -->}<!-- [et_pb_line_break_holder] -->window.onclick = (event) =>{<!-- [et_pb_line_break_holder] -->document.getElementById('options').innerHTML = '';<!-- [et_pb_line_break_holder] -->}<!-- [et_pb_line_break_holder] -->    <!-- [et_pb_line_break_holder] -->var search_list = {<!-- [et_pb_line_break_holder] -->\"AI INDIA TRACK\":'https:\/\/www.aimlsystems.org\/2024\/ai-india-track\/',        <!-- [et_pb_line_break_holder] -->\"BANQUET TALK\":'https:\/\/www.aimlsystems.org\/2024\/banquet-talk\/',        <!-- [et_pb_line_break_holder] -->\"CALL FOR DEMOS AND EXHIBITS\":'https:\/\/www.aimlsystems.org\/2024\/demos\/',         <!-- [et_pb_line_break_holder] -->\"CALL FOR INDUSTRY PAPERS\":'https:\/\/www.aimlsystems.org\/2024\/callindustry\/',         <!-- [et_pb_line_break_holder] -->\"CALL FOR RESEARCH PAPERS\":'https:\/\/www.aimlsystems.org\/2024\/callresearch\/',         <!-- [et_pb_line_break_holder] -->\"HOME\":'https:\/\/www.aimlsystems.org\/2024\/',<!-- [et_pb_line_break_holder] -->\"KEYNOTE SPEAKERS\":'https:\/\/www.aimlsystems.org\/2024\/keynote_details',         <!-- [et_pb_line_break_holder] -->\"ORGANIZING COMMITTEE\":'https:\/\/www.aimlsystems.org\/2024\/organising_committee',      <!-- [et_pb_line_break_holder] -->\"PANELS\":'https:\/\/www.aimlsystems.org\/2024\/panels',  <!-- [et_pb_line_break_holder] -->\"PROGRAM COMMITTEE\":'https:\/\/www.aimlsystems.org\/2024\/committeepro',         <!-- [et_pb_line_break_holder] -->\"REGISTRATION\":'https:\/\/www.aimlsystems.org\/2024\/registration', <!-- [et_pb_line_break_holder] -->\"SCHEDULE\":'https:\/\/www.aimlsystems.org\/2024\/schedule', <!-- [et_pb_line_break_holder] -->\"SPONSORSHIP\":'https:\/\/www.aimlsystems.org\/2024\/sponsorship', <!-- [et_pb_line_break_holder] -->\"TRAVEL AND 
ACCOMMODATION\":'https:\/\/www.aimlsystems.org\/2024\/visa-and-travel',         <!-- [et_pb_line_break_holder] -->\"VISA AND TRAVEL\":'https:\/\/www.aimlsystems.org\/2024\/visa-and-travel',         <!-- [et_pb_line_break_holder] -->\"VOLUNTEER\":'https:\/\/www.aimlsystems.org\/2024\/volunteer', <!-- [et_pb_line_break_holder] -->\"WORKSHOP-GENAI\":'https:\/\/www.aimlsystems.org\/2024\/workshop-genai',         <!-- [et_pb_line_break_holder] -->\"WORKSHOP-SAI4E\":'https:\/\/www.aimlsystems.org\/2024\/sai4e',        <!-- [et_pb_line_break_holder] -->\"WORKSHOPS\":'https:\/\/www.aimlsystems.org\/2024\/workshops'<!-- [et_pb_line_break_holder] -->        }<!-- [et_pb_line_break_holder] -->function searchReq(){<!-- [et_pb_line_break_holder] -->  document.getElementById('options').innerHTML = '';<!-- [et_pb_line_break_holder] -->var val = document.getElementById('search').value;<!-- [et_pb_line_break_holder] -->var list = Object.keys(search_list);<!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] -->for(let i of list){<!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] -->if(i.includes(val.toUpperCase())){<!-- [et_pb_line_break_holder] -->    var elem = document.createElement('div');<!-- [et_pb_line_break_holder] -->    var a = document.createElement('a');<!-- [et_pb_line_break_holder] -->    var but = document.createElement('button')<!-- [et_pb_line_break_holder] -->    <!-- [et_pb_line_break_holder] -->    a.className = 'search_op';<!-- [et_pb_line_break_holder] -->    elem.className = 'search_op';<!-- [et_pb_line_break_holder] -->but.style.fontFamily = 'Helvetica'<!-- [et_pb_line_break_holder] -->but.style.color = \"white\";<!-- [et_pb_line_break_holder] -->but.style.borderColor = \"rgba(0, 0, 120, 0.8)\"<!-- [et_pb_line_break_holder] -->but.style.backgroundColor = \"rgba(0, 0, 120, 0.8)\";<!-- [et_pb_line_break_holder] -->but.style.borderRadius = \"10px\"<!-- [et_pb_line_break_holder] -->but.style.fontWeight = \"bold\";<!-- 
[et_pb_line_break_holder] -->but.style.margin = \"0px\";<!-- [et_pb_line_break_holder] -->but.style.display = \"block\"<!-- [et_pb_line_break_holder] -->but.style.height = \"50px\";<!-- [et_pb_line_break_holder] -->but.style.width= \"275px\";<!-- [et_pb_line_break_holder] -->a.style.position = \"absolute\"<!-- [et_pb_line_break_holder] -->    elem.style.visibility = \"visible\";<!-- [et_pb_line_break_holder] -->    a.style.visibility = \"visible\";<!-- [et_pb_line_break_holder] -->    elem.style.height = \"50px\";<!-- [et_pb_line_break_holder] -->    a.style.height = \"50px\" <!-- [et_pb_line_break_holder] -->    a.href = list[i];<!-- [et_pb_line_break_holder] -->\ta.append(but)<!-- [et_pb_line_break_holder] -->    but.append(i);<!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] -->    elem.appendChild(a);<!-- [et_pb_line_break_holder] -->    document.getElementById('options').append(elem);<!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] -->}<!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] -->}<!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] -->}<!-- [et_pb_line_break_holder] --><\/script><!-- [et_pb_line_break_holder] --><\/p>\n<style><!-- [et_pb_line_break_holder] -->:hover:-moz-placeholder{<!-- [et_pb_line_break_holder] -->    color:rgba(0, 0, 120, 0.8)<!-- [et_pb_line_break_holder] -->    <!-- [et_pb_line_break_holder] -->  } <!-- [et_pb_line_break_holder] -->  <!-- [et_pb_line_break_holder] -->  :hover::-webkit-input-placeholder { <!-- [et_pb_line_break_holder] -->    color:rgba(0, 0, 120, 0.8)}<!-- [et_pb_line_break_holder] -->  <!-- [et_pb_line_break_holder] -->  :hover:-ms-input-placeholder { <!-- [et_pb_line_break_holder] -->    color:rgba(0, 0, 120, 0.8)}<!-- [et_pb_line_break_holder] -->  <!-- [et_pb_line_break_holder] -->  :hover::-moz-placeholder{ <!-- [et_pb_line_break_holder] -->    
color:rgba(0, 0, 120, 0.8)}<!-- [et_pb_line_break_holder] -->  <!-- [et_pb_line_break_holder] -->  <!-- [et_pb_line_break_holder] -->  <!-- [et_pb_line_break_holder] -->  <!-- [et_pb_line_break_holder] -->a{<!-- [et_pb_line_break_holder] -->text-decoration:none<!-- [et_pb_line_break_holder] -->}<!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] -->\t#options{position:absolute;z-index:1}<!-- [et_pb_line_break_holder] -->#search{<!-- [et_pb_line_break_holder] -->border-radius:25px;<!-- [et_pb_line_break_holder] -->font-family:Roboto;<!-- [et_pb_line_break_holder] -->width:0px;<!-- [et_pb_line_break_holder] -->display:inline-block;<!-- [et_pb_line_break_holder] -->height:50px;<!-- [et_pb_line_break_holder] -->text-align:center;<!-- [et_pb_line_break_holder] -->border-color:white;<!-- [et_pb_line_break_holder] -->transition:width 1s;<!-- [et_pb_line_break_holder] -->border-style:solid}<!-- [et_pb_line_break_holder] --><\/style>\n<p><!-- [et_pb_line_break_holder] -->  <!-- [et_pb_line_break_holder] --><svg onclick = \"<!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] -->var width = document.getElementById(&#8216;search&#8217;);<!-- [et_pb_line_break_holder] -->if(width.style.width == &#8216;0px&#8217;){<!-- [et_pb_line_break_holder] -->width.style.width = &#8216;350px&#8217;;<!-- [et_pb_line_break_holder] -->}<!-- [et_pb_line_break_holder] -->else{<!-- [et_pb_line_break_holder] -->width.style.width = &#8216;0px&#8217;}<!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] -->&#8221; style = &#8220;width:30px;height:20px;display:inline-block;&#8221;><!-- [et_pb_line_break_holder] --><circle cx = 8 cy = 8 stroke-width = 3 stroke = \"white\" fill = \"none\" r = 6><\/circle><!-- [et_pb_line_break_holder] --><polyline points = \"12 12 20 20 \" stroke-width = 3 stroke = \"white\" fill = \"none\"><\/polyline><!-- [et_pb_line_break_holder] --><\/svg><!-- [et_pb_line_break_holder] --><input placeholder = 
\"Search for info about AIMLS 2024 Conference\"  onkeyup = \"searchReq()\" id  = \"search\" type = \"search\" \/><!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] --><\/p>\n<div style = \"position:relative\"> <!-- [et_pb_line_break_holder] -->    <!-- [et_pb_line_break_holder] --><\/p>\n<div id = \"options\"><\/div>\n<p><!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] --><\/div>\n<p><!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] --><\/html><!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] --><!-- [et_pb_line_break_holder] -->[\/et_pb_code][\/et_pb_column][et_pb_column type=&#8221;1_3&#8243; _builder_version=&#8221;4.19.2&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_image src=&#8221;https:\/\/www.aimlsystems.org\/2023\/wp-content\/uploads\/2023\/05\/ai-icon-03.png&#8221; title_text=&#8221;ai-icon-03&#8243; align=&#8221;center&#8221; disabled_on=&#8221;on|on|off&#8221; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;6px|||||&#8221; global_colors_info=&#8221;{}&#8221;][\/et_pb_image][\/et_pb_column][\/et_pb_row][\/et_pb_section][et_pb_section fb_built=&#8221;1&#8243; admin_label=&#8221;Features&#8221; module_id=&#8221;about&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; background_color=&#8221;#dbdbdb&#8221; background_image=&#8221;https:\/\/www.aimlsystems.org\/2023\/wp-content\/uploads\/2023\/05\/rm380-10.jpg&#8221; background_blend=&#8221;overlay&#8221; custom_padding=&#8221;3.9%||||false|false&#8221; hover_enabled=&#8221;0&#8243; use_background_color_gradient_phone=&#8221;on&#8221; background_color_gradient_stops_phone=&#8221;#001528 0%|rgba(255, 255, 255, 0) 10%|rgba(255,255,255,0) 70%|#0f0122 100%&#8221; collapsed=&#8221;on&#8221; global_colors_info=&#8221;{}&#8221; 
sticky_enabled=&#8221;0&#8243;][et_pb_row column_structure=&#8221;1_4,3_4&#8243; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; custom_padding=&#8221;27px||43px|||&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_column type=&#8221;1_4&#8243; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_image src=&#8221;https:\/\/www.aimlsystems.org\/2024\/wp-content\/uploads\/2024\/02\/michael_jordan.jpg&#8221; title_text=&#8221;michael_jordan&#8221; align=&#8221;center&#8221; _builder_version=&#8221;4.23.4&#8243; _module_preset=&#8221;default&#8221; max_width=&#8221;200px&#8221; custom_margin=&#8221;||15px|||&#8221; filter_saturate=&#8221;0%&#8221; animation_style=&#8221;slide&#8221; border_radii=&#8221;on|115px|115px|115px|115px&#8221; border_color_all=&#8221;#FFFFFF&#8221; box_shadow_style=&#8221;preset2&#8243; global_colors_info=&#8221;{}&#8221; transform_styles__hover_enabled=&#8221;on|hover&#8221; transform_scale__hover_enabled=&#8221;on|hover&#8221; transform_translate__hover_enabled=&#8221;on|hover&#8221; transform_rotate__hover_enabled=&#8221;on|hover&#8221; transform_skew__hover_enabled=&#8221;on|hover&#8221; transform_origin__hover_enabled=&#8221;on|hover&#8221; transform_scale__hover=&#8221;104%|104%&#8221; filter_saturate__hover_enabled=&#8221;on|hover&#8221; filter_saturate__hover=&#8221;100%&#8221; border_width_all__hover_enabled=&#8221;on|hover&#8221; border_width_all__hover=&#8221;1px&#8221; border_radii__hover_enabled=&#8221;on|hover&#8221; border_radii__hover=&#8221;on|115px|115px|115px|115px&#8221;][\/et_pb_image][\/et_pb_column][et_pb_column type=&#8221;3_4&#8243; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_text _builder_version=&#8221;4.23.4&#8243; _module_preset=&#8221;25d2b0d8-2373-4ae8-9188-0ef4b1bb77f4&#8243; text_text_color=&#8221;#212A4F&#8221; 
header_4_text_color=&#8221;gcid-5fa2e3a6-d98c-4022-811a-b5fb6fa40d68&#8243; header_4_font_size=&#8221;20px&#8221; custom_margin=&#8221;||15px|||&#8221; global_colors_info=&#8221;{%22gcid-5fa2e3a6-d98c-4022-811a-b5fb6fa40d68%22:%91%22header_4_text_color%22%93}&#8221;]<\/p>\n<h4><a href=\"https:\/\/www2.eecs.berkeley.edu\/Faculty\/Homepages\/jordan.html\" target=\"_blank\" rel=\"noopener\">Prof. Michael Jordan<\/a><\/h4>\n<p>UC Berkeley, USA<\/p>\n<p>[\/et_pb_text][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; border_radii=&#8221;on|30px|30px|30px|30px&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_accordion_item title=&#8221;Title&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<p><span>A Collectivist View on AI: \u00a0Collaborative Learning, Statistical Incentives, and Social Welfare<\/span><\/p>\n<p>[\/et_pb_accordion_item][\/et_pb_accordion][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; border_radii=&#8221;on|30px|30px|30px|30px&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_accordion_item title=&#8221;Abstract&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; 
global_colors_info=&#8221;{}&#8221;]<\/p>\n<p><span>Artificial intelligence (AI) has focused on a paradigm in which intelligence<\/span><br \/><span>inheres in a single, autonomous agent.\u00a0 Social issues are entirely secondary<\/span><br \/><span>in this paradigm.\u00a0 When AI systems are deployed in social contexts, however,<\/span><br \/><span>the overall design of such systems is often naive&#8212;a centralized entity provides<\/span><br \/><span>services to passive agents and reaps the rewards.\u00a0 Such a paradigm need not be<\/span><br \/><span>the dominant paradigm for information technology.\u00a0 In a broader framing, agents are<\/span><br \/><span>active, they are cooperative, and they wish to obtain value from their participation<\/span><br \/><span>in learning-based systems.\u00a0 Agents may supply data and other resources to the system,<\/span><br \/><span>only if it is in their interest to do so.\u00a0 Critically, intelligence inheres as much<\/span><br \/><span>in the overall system as it does in individual agents, be they humans or computers.<\/span><br \/><span>This is a perspective that is familiar in the social sciences, and a key theme in<\/span><br \/><span>my work is that of bringing economics into contact with foundational issues in<\/span><br \/><span>computing and data sciences.\u00a0 I&#8217;ll emphasize some of the design challenges<\/span><br \/><span>that arise at this tripartite interface.<\/span><\/p>\n<p>[\/et_pb_accordion_item][\/et_pb_accordion][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; hover_enabled=&#8221;0&#8243; border_radii=&#8221;on|30px|30px|30px|30px&#8221; 
global_colors_info=&#8221;{}&#8221; sticky_enabled=&#8221;0&#8243;][et_pb_accordion_item title=&#8221;Bio&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; hover_enabled=&#8221;0&#8243; global_colors_info=&#8221;{}&#8221; sticky_enabled=&#8221;0&#8243;]&nbsp;<\/p>\n<p><span><\/p>\n<p> Michael I. Jordan is a researcher at Inria Paris and Professor Emeritus at the University of California, Berkeley.  His research interests bridge the computational, statistical, cognitive, biological and social sciences.  Prof. Jordan is a member of the National Academy of Sciences, a member of the National Academy of Engineering, a member<br \/>\nof the American Academy of Arts and Sciences, and a Foreign Member of the Royal Society.  He was the inaugural winner of the World Laureates Association (WLA) Prize in 2022.  He was a Plenary Lecturer at the International Congress of Mathematicians in 2018.  He has received the Ulf Grenander Prize from the American Mathematical Society, the IEEE John von Neumann Medal, the IJCAI Research Excellence Award, the David E. Rumelhart Prize, and the ACM\/AAAI Allen Newell Award.  In 2016, Prof. Jordan was named the &#8220;most influential computer scientist&#8221; worldwide in an article in Science, based on rankings from the Semantic Scholar search engine. 
<\/p>\n<p><\/span>[\/et_pb_accordion_item][\/et_pb_accordion][\/et_pb_column][\/et_pb_row][et_pb_row column_structure=&#8221;1_4,3_4&#8243; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; custom_padding=&#8221;45px|||||&#8221; border_width_top=&#8221;1px&#8221; border_color_top=&#8221;#878787&#8243; global_colors_info=&#8221;{}&#8221;][et_pb_column type=&#8221;1_4&#8243; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_image src=&#8221;https:\/\/www.aimlsystems.org\/2024\/wp-content\/uploads\/2024\/02\/Deepak_Bansal.jpg&#8221; title_text=&#8221;Deepak_Bansal&#8221; align=&#8221;center&#8221; _builder_version=&#8221;4.23.4&#8243; _module_preset=&#8221;default&#8221; max_width=&#8221;200px&#8221; custom_margin=&#8221;||15px|||&#8221; filter_saturate=&#8221;0%&#8221; animation_style=&#8221;slide&#8221; border_radii=&#8221;on|115px|115px|115px|115px&#8221; border_color_all=&#8221;#FFFFFF&#8221; box_shadow_style=&#8221;preset2&#8243; global_colors_info=&#8221;{}&#8221; transform_styles__hover_enabled=&#8221;on|hover&#8221; transform_scale__hover_enabled=&#8221;on|hover&#8221; transform_translate__hover_enabled=&#8221;on|hover&#8221; transform_rotate__hover_enabled=&#8221;on|hover&#8221; transform_skew__hover_enabled=&#8221;on|hover&#8221; transform_origin__hover_enabled=&#8221;on|hover&#8221; transform_scale__hover=&#8221;104%|104%&#8221; filter_saturate__hover_enabled=&#8221;on|hover&#8221; filter_saturate__hover=&#8221;100%&#8221; border_width_all__hover_enabled=&#8221;on|hover&#8221; border_width_all__hover=&#8221;1px&#8221; border_radii__hover_enabled=&#8221;on|hover&#8221; border_radii__hover=&#8221;on|115px|115px|115px|115px&#8221;][\/et_pb_image][\/et_pb_column][et_pb_column type=&#8221;3_4&#8243; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_text _builder_version=&#8221;4.23.4&#8243; 
_module_preset=&#8221;25d2b0d8-2373-4ae8-9188-0ef4b1bb77f4&#8243; text_text_color=&#8221;gcid-5fa2e3a6-d98c-4022-811a-b5fb6fa40d68&#8243; header_4_text_color=&#8221;gcid-5fa2e3a6-d98c-4022-811a-b5fb6fa40d68&#8243; header_4_font_size=&#8221;20px&#8221; custom_margin=&#8221;||15px|||&#8221; global_colors_info=&#8221;{%22gcid-5fa2e3a6-d98c-4022-811a-b5fb6fa40d68%22:%91%22text_text_color%22,%22header_4_text_color%22%93}&#8221;]<\/p>\n<h4><a href=\"https:\/\/www.linkedin.com\/in\/deepak-bansal-7578375\/\" target=\"_blank\" rel=\"noopener\">Deepak Bansal<\/a><\/h4>\n<p>Corporate Vice-President, Microsoft Azure, USA<\/p>\n<p>[\/et_pb_text][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; border_radii=&#8221;on|30px|30px|30px|30px&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_accordion_item title=&#8221;Title&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<p>Leveraging LLMs for Networking &amp; Security in Cloud Environments<b><br \/><\/b><\/p>\n<p>[\/et_pb_accordion_item][\/et_pb_accordion][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; border_radii=&#8221;on|30px|30px|30px|30px&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_accordion_item 
title=&#8221;Abstract&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<div>\n<div>Customer networks have grown, mostly organically, large and complex in cloud environments like Azure. Customers are often afraid to make changes and find it hard to diagnose when things go wrong.\u00a0<\/div>\n<div><\/div>\n<div>In this talk, I am going to share how Microsoft is using LLMs to simplify network operations at scale in Azure and how it is enabling the same for its customers through Azure Copilot. On the security side, I will share how LLMs are being used to enable security monitoring and threat hunting.<\/div>\n<\/div>\n<p>[\/et_pb_accordion_item][\/et_pb_accordion][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;on|on|on&#8221; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; border_radii=&#8221;on|30px|30px|30px|30px&#8221; disabled=&#8221;on&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_accordion_item title=&#8221;Bio&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<div class=\"row\">\n<div class=\"col-9 col-12-medium\">\n<div class=\"text-justify\">Prof. Milind Tambe is Gordon McKay Professor of Computer Science and Director of Center for Research in Computation and Society at Harvard University; concurrently, he is also Principal Scientist and Director &#8220;AI for Social Good&#8221; at Google Research. 
He is recipient of the IJCAI (International Joint Conference on Artificial Intelligence) John McCarthy Award, AAAI (Association for Advancement of Artificial Intelligence) Feigenbaum Prize, AAAI Robert S. Engelmore Memorial Lecture Award, AAMAS ACM (Association for Computing Machinery) Autonomous Agents Research Award, INFORMS ( Institute for Operations Research and the Management Sciences) Wagner prize for excellence in Operations Research practice and Rist Prize from MORS (Military Operations Research Society). He is a fellow of AAAI and ACM. For his work on AI and public safety, he has received Columbus Fellowship Foundation Homeland security award and commendations and certificates of appreciation from the US Coast Guard, the Federal Air Marshals Service and airport police at the city of Los Angeles.<\/div>\n<\/div>\n<\/div>\n<p>[\/et_pb_accordion_item][\/et_pb_accordion][\/et_pb_column][\/et_pb_row][et_pb_row column_structure=&#8221;1_4,3_4&#8243; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; custom_padding=&#8221;45px|||||&#8221; border_width_top=&#8221;1px&#8221; border_color_top=&#8221;#878787&#8243; locked=&#8221;off&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_column type=&#8221;1_4&#8243; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_image src=&#8221;https:\/\/www.aimlsystems.org\/2024\/wp-content\/uploads\/2023\/05\/kavita-bala.jpg&#8221; title_text=&#8221;kavita-bala&#8221; align=&#8221;center&#8221; _builder_version=&#8221;4.23.4&#8243; _module_preset=&#8221;default&#8221; max_width=&#8221;200px&#8221; custom_margin=&#8221;||15px|||&#8221; filter_saturate=&#8221;0%&#8221; animation_style=&#8221;slide&#8221; border_radii=&#8221;on|115px|115px|115px|115px&#8221; border_color_all=&#8221;#FFFFFF&#8221; box_shadow_style=&#8221;preset2&#8243; global_colors_info=&#8221;{}&#8221; transform_styles__hover_enabled=&#8221;on|hover&#8221; 
transform_scale__hover_enabled=&#8221;on|hover&#8221; transform_translate__hover_enabled=&#8221;on|hover&#8221; transform_rotate__hover_enabled=&#8221;on|hover&#8221; transform_skew__hover_enabled=&#8221;on|hover&#8221; transform_origin__hover_enabled=&#8221;on|hover&#8221; transform_scale__hover=&#8221;104%|104%&#8221; filter_saturate__hover_enabled=&#8221;on|hover&#8221; filter_saturate__hover=&#8221;100%&#8221; border_width_all__hover_enabled=&#8221;on|hover&#8221; border_width_all__hover=&#8221;1px&#8221; border_radii__hover_enabled=&#8221;on|hover&#8221; border_radii__hover=&#8221;on|115px|115px|115px|115px&#8221;][\/et_pb_image][\/et_pb_column][et_pb_column type=&#8221;3_4&#8243; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_text _builder_version=&#8221;4.23.4&#8243; _module_preset=&#8221;25d2b0d8-2373-4ae8-9188-0ef4b1bb77f4&#8243; text_text_color=&#8221;gcid-5fa2e3a6-d98c-4022-811a-b5fb6fa40d68&#8243; header_4_text_color=&#8221;gcid-5fa2e3a6-d98c-4022-811a-b5fb6fa40d68&#8243; header_4_font_size=&#8221;20px&#8221; custom_margin=&#8221;||15px|||&#8221; global_colors_info=&#8221;{%22gcid-5fa2e3a6-d98c-4022-811a-b5fb6fa40d68%22:%91%22text_text_color%22,%22header_4_text_color%22%93}&#8221;]<\/p>\n<h4><a href=\"https:\/\/www.deakin.edu.au\/about-deakin\/people\/svetha-venkatesh\" target=\"_blank\" rel=\"noopener\"><\/a><\/h4>\n<h4><a href=\"https:\/\/www.cs.cornell.edu\/~kb\/\">Prof. 
Kavita Bala<\/a><\/h4>\n<p>Cornell University, USA<\/p>\n<p>[\/et_pb_text][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; border_radii=&#8221;on|30px|30px|30px|30px&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_accordion_item title=&#8221;Title&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<p>Visual Discovery and Understanding in Satellite Imagery<\/p>\n<p>[\/et_pb_accordion_item][\/et_pb_accordion][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; border_radii=&#8221;on|30px|30px|30px|30px&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_accordion_item title=&#8221;Abstract&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<p>We are capturing visual data of the planet at an unprecedented scale through satellite imagery, drones, social media photo collections, and more. Tools to understand and discover insights from this visual data are of immense value to scientists, cultural anthropologists, and policy makers. 
In this talk I will describe research from my group that analyzes spatio-temporal image collections to understand a wide range of phenomena including style trends, cultural events, crop cycles, natural disasters like wildfires, and more. We introduce new vision-language models to allow unsupervised discovery of open-world concepts, to discover spatio-temporal trends and events, and new datasets and benchmarks for satellite events. I will describe potential applications of these tools to crop science, climate science, disaster discovery, and cultural erasure.<\/p>\n<p>[\/et_pb_accordion_item][\/et_pb_accordion][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; border_radii=&#8221;on|30px|30px|30px|30px&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_accordion_item title=&#8221;Bio&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<div class=\"row\">\n<div class=\"col-9 col-12-medium\">\n<div class=\"text-justify\">\n<p>Kavita Bala is the inaugural dean of the Cornell Bowers College of Computing and Information Science at Cornell University. Bala received her S.M. and Ph.D. from the Massachusetts Institute of Technology (MIT). Before becoming dean, she served as the chair of the Cornell Computer Science department.\u00a0 Bala leads research in computer vision and computer graphics in visual discovery and recognition; material modeling and acquisition; physically based rendering; and perception. She co-founded GrokStyle, a visual recognition AI company, which drew IKEA as a client, and was acquired by Facebook in 2019. 
Bala is the recipient of the SIGGRAPH Computer Graphics Achievement Award, the IIT Bombay Distinguished Alumnus Award, and is a Fellow of the Association for Computing Machinery (ACM) and the SIGGRAPH Academy. Bala has received multiple teaching awards, has served as the Editor-in-Chief of Transactions on Graphics (TOG), and serves on the boards of the Toyota Technological Institute at Chicago (TTIC), and non-profits <a href=\"https:\/\/www.colorstack.org\/\" target=\"_blank\" rel=\"noopener\">Colorstack<\/a>, aimed at increasing representation in computer science, and the Ithaca\u00a0<a href=\"https:\/\/sciencenter.org\/\" target=\"_blank\" rel=\"noopener\">Sciencenter<\/a>.<\/p>\n<\/div>\n<div class=\"text-justify\"><\/div>\n<div class=\"text-justify\"><\/div>\n<div class=\"text-justify\"><\/div>\n<div class=\"text-justify\"><\/div>\n<\/div>\n<\/div>\n<p>[\/et_pb_accordion_item][\/et_pb_accordion][\/et_pb_column][\/et_pb_row][et_pb_row column_structure=&#8221;1_4,3_4&#8243; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.23.4&#8243; _module_preset=&#8221;default&#8221; custom_padding=&#8221;45px|||||&#8221; border_width_top=&#8221;1px&#8221; border_color_top=&#8221;#878787&#8243; global_colors_info=&#8221;{}&#8221;][et_pb_column type=&#8221;1_4&#8243; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_image src=&#8221;https:\/\/www.aimlsystems.org\/2024\/wp-content\/uploads\/2024\/05\/Dan_roth.jpg&#8221; title_text=&#8221;Dan_roth&#8221; align=&#8221;center&#8221; _builder_version=&#8221;4.23.4&#8243; _module_preset=&#8221;default&#8221; max_width=&#8221;200px&#8221; custom_margin=&#8221;||15px|||&#8221; filter_saturate=&#8221;0%&#8221; animation_style=&#8221;slide&#8221; border_radii=&#8221;on|115px|115px|115px|115px&#8221; border_color_all=&#8221;#FFFFFF&#8221; box_shadow_style=&#8221;preset2&#8243; global_colors_info=&#8221;{}&#8221; 
transform_styles__hover_enabled=&#8221;on|hover&#8221; transform_scale__hover_enabled=&#8221;on|hover&#8221; transform_translate__hover_enabled=&#8221;on|hover&#8221; transform_rotate__hover_enabled=&#8221;on|hover&#8221; transform_skew__hover_enabled=&#8221;on|hover&#8221; transform_origin__hover_enabled=&#8221;on|hover&#8221; transform_scale__hover=&#8221;104%|104%&#8221; filter_saturate__hover_enabled=&#8221;on|hover&#8221; filter_saturate__hover=&#8221;100%&#8221; border_width_all__hover_enabled=&#8221;on|hover&#8221; border_width_all__hover=&#8221;1px&#8221; border_radii__hover_enabled=&#8221;on|hover&#8221; border_radii__hover=&#8221;on|115px|115px|115px|115px&#8221;][\/et_pb_image][\/et_pb_column][et_pb_column type=&#8221;3_4&#8243; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_text _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;25d2b0d8-2373-4ae8-9188-0ef4b1bb77f4&#8243; text_text_color=&#8221;gcid-5fa2e3a6-d98c-4022-811a-b5fb6fa40d68&#8243; header_4_text_color=&#8221;gcid-5fa2e3a6-d98c-4022-811a-b5fb6fa40d68&#8243; header_4_font_size=&#8221;20px&#8221; custom_margin=&#8221;||15px|||&#8221; global_colors_info=&#8221;{%22gcid-5fa2e3a6-d98c-4022-811a-b5fb6fa40d68%22:%91%22text_text_color%22,%22header_4_text_color%22%93}&#8221;]<\/p>\n<h4><a href=\"https:\/\/www.linkedin.com\/in\/dan-roth-8667361\/\" target=\"_blank\" rel=\"noopener\">Prof. 
Dan Roth<\/a><\/h4>\n<p>Oracle and the University of Pennsylvania<\/p>\n<p>[\/et_pb_text][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; border_radii=&#8221;on|30px|30px|30px|30px&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_accordion_item title=&#8221;Title&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<p>Reasoning Myths about Language Models: What is Next?<\/p>\n<p>[\/et_pb_accordion_item][\/et_pb_accordion][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; border_radii=&#8221;on|30px|30px|30px|30px&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_accordion_item title=&#8221;Abstract&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<p>The rapid progress made over the last few years in generating linguistically coherent natural language has blurred, in the mind of many, the difference between natural language generation, understanding, and the ability to reason with respect to the world. 
Nevertheless, robust support of high-level decisions that depend on natural language understanding, and one that requires dealing with \u201ctruthfulness\u201d are still beyond our capabilities, partly since most of these tasks are very sparse, often require grounding, and may depend on new types of supervision signals.<br \/>I will discuss some of the challenges underlying reasoning and argue that we should focus on LLMs as orchestrators \u2013 coordinating and managing multiple models and special purpose agents. I will discuss some of the challenges and present some of our work in this space, focusing on supporting planning and a range of quantitative, visual, and spatial reasoning tasks.<\/p>\n<p>[\/et_pb_accordion_item][\/et_pb_accordion][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; border_radii=&#8221;on|30px|30px|30px|30px&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_accordion_item title=&#8221;Bio&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<div class=\"row\">\n<div class=\"col-9 col-12-medium\">Dan Roth is the Eduardo D. Glandt Distinguished Professor at the Department of Computer and Information Science, University of Pennsylvania and the Chief AI Scientist at Oracle. Until June 2024 Dan was a VP\/Distinguished Scientist at AWS AI. In his role at AWS Roth led over the last three years the scientific effort behind the first-generation Generative AI products from AWS, including Titan Models, Amazon Q efforts, and Bedrock, from inception until they became generally available. 
<br \/>Dan is a Fellow of the AAAS, ACM, AAAI, and ACL. In 2017, Dan was awarded the John McCarthy Award; he was recognized \u201cfor major conceptual and theoretical advances in the modeling of natural language understanding, machine learning, and reasoning.\u201d He has published broadly in natural language processing, machine learning, knowledge representation and reasoning, and learning theory, was the Editor-in-Chief of the Journal of Artificial Intelligence Research (JAIR) and has served as a Program Chair and Conference Chair for the major conferences in his research areas. Roth has been involved in several startups; most recently he was a co-founder and chief scientist of NexLP, a startup that leverages the latest advances in Natural Language Processing, Cognitive Analytics, and Machine Learning in the legal and compliance domains. NexLP was acquired by Reveal. Dan received his B.A. summa cum laude in Mathematics from the Technion, Israel and his Ph.D. in Computer Science from Harvard University in 1995.<\/div>\n<\/div>\n<p>[\/et_pb_accordion_item][\/et_pb_accordion][\/et_pb_column][\/et_pb_row][et_pb_row column_structure=&#8221;1_4,3_4&#8243; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.23.4&#8243; _module_preset=&#8221;default&#8221; custom_padding=&#8221;45px|||||&#8221; border_width_top=&#8221;1px&#8221; border_color_top=&#8221;#878787&#8243; locked=&#8221;off&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_column type=&#8221;1_4&#8243; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_image src=&#8221;https:\/\/www.aimlsystems.org\/2024\/wp-content\/uploads\/2024\/03\/AI-ML-Photo-e1710219861596.jpeg&#8221; title_text=&#8221;AI ML Photo&#8221; align=&#8221;center&#8221; _builder_version=&#8221;4.23.4&#8243; _module_preset=&#8221;default&#8221; max_width=&#8221;200px&#8221; custom_margin=&#8221;||15px|||&#8221; filter_saturate=&#8221;0%&#8221; 
animation_style=&#8221;slide&#8221; border_radii=&#8221;on|115px|115px|115px|115px&#8221; border_color_all=&#8221;#FFFFFF&#8221; box_shadow_style=&#8221;preset2&#8243; global_colors_info=&#8221;{}&#8221; transform_styles__hover_enabled=&#8221;on|hover&#8221; transform_scale__hover_enabled=&#8221;on|hover&#8221; transform_translate__hover_enabled=&#8221;on|hover&#8221; transform_rotate__hover_enabled=&#8221;on|hover&#8221; transform_skew__hover_enabled=&#8221;on|hover&#8221; transform_origin__hover_enabled=&#8221;on|hover&#8221; transform_scale__hover=&#8221;104%|104%&#8221; filter_saturate__hover_enabled=&#8221;on|hover&#8221; filter_saturate__hover=&#8221;100%&#8221; border_width_all__hover_enabled=&#8221;on|hover&#8221; border_width_all__hover=&#8221;1px&#8221; border_radii__hover_enabled=&#8221;on|hover&#8221; border_radii__hover=&#8221;on|115px|115px|115px|115px&#8221;][\/et_pb_image][\/et_pb_column][et_pb_column type=&#8221;3_4&#8243; _builder_version=&#8221;4.21.0&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_text _builder_version=&#8221;4.23.4&#8243; _module_preset=&#8221;25d2b0d8-2373-4ae8-9188-0ef4b1bb77f4&#8243; text_text_color=&#8221;gcid-5fa2e3a6-d98c-4022-811a-b5fb6fa40d68&#8243; header_4_text_color=&#8221;gcid-5fa2e3a6-d98c-4022-811a-b5fb6fa40d68&#8243; header_4_font_size=&#8221;20px&#8221; custom_margin=&#8221;||15px|||&#8221; global_colors_info=&#8221;{%22gcid-5fa2e3a6-d98c-4022-811a-b5fb6fa40d68%22:%91%22text_text_color%22,%22header_4_text_color%22%93}&#8221;]<\/p>\n<h4><a href=\"https:\/\/www.cs.columbia.edu\/~nayar\/\">Prof. 
Shree Nayar<\/a><\/h4>\n<p>Columbia University, USA<\/p>\n<p>[\/et_pb_text][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; border_radii=&#8221;on|30px|30px|30px|30px&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_accordion_item title=&#8221;Title&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<p>Computational Imaging and Future Cameras<\/p>\n<p>[\/et_pb_accordion_item][\/et_pb_accordion][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; border_radii=&#8221;on|30px|30px|30px|30px&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_accordion_item title=&#8221;Abstract&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<p>Computational imaging uses new optics to capture a coded image, and an appropriate algorithm to decode the captured image. This approach has enabled mobile devices to produce images that are rich, immersive and interactive. 
In this talk, we will show examples of computational cameras that are transforming the way visual information is captured, communicated and used by both humans and machines.<\/p>\n<p>[\/et_pb_accordion_item][\/et_pb_accordion][et_pb_accordion open_toggle_background_color=&#8221;#f7f7f7&#8243; icon_color=&#8221;#0C71C3&#8243; use_icon_font_size=&#8221;on&#8221; disabled_on=&#8221;off|off|off&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; custom_margin=&#8221;||14px|||&#8221; animation_style=&#8221;slide&#8221; animation_direction=&#8221;bottom&#8221; animation_intensity_slide=&#8221;18%&#8221; border_radii=&#8221;on|30px|30px|30px|30px&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_accordion_item title=&#8221;Bio&#8221; open=&#8221;on&#8221; _builder_version=&#8221;4.25.1&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<div class=\"row\">\n<div class=\"col-9 col-12-medium\">\n<div class=\"text-justify\"><\/div>\n<div class=\"text-justify\">Shree K. Nayar is the T. C. Chang Professor of Computer Science at Columbia University. He heads the Columbia Imaging and Vision Laboratory (CAVE), which develops computational imaging and computer vision systems. He received his PhD degree in Electrical and Computer Engineering from the Robotics Institute at Carnegie Mellon University. 
For his research and teaching he has received several honors including the David Marr Prize (1990 and 1995), the David and Lucile Packard Fellowship (1992), the National Young Investigator Award (1993), the NTT Distinguished Scientific Achievement Award (1994), the Keck Foundation Award for Excellence in Teaching (1995), the Columbia Great Teacher Award (2006), the Carnegie Mellon Alumni Achievement Award (2009), Sony Appreciation Honor (2014), the Columbia Engineering Distinguished Faculty Teaching Award (2015), the IEEE PAMI Distinguished Researcher Award (2019), the Funai Achievement Award (2021), and the Okawa Prize (2022). For his contributions to computer vision and computational imaging, he was elected to the National Academy of Engineering in 2008, the American Academy of Arts and Sciences in 2011, and the National Academy of Inventors in 2014.<\/div>\n<div class=\"text-justify\"><\/div>\n<div class=\"text-justify\"><\/div>\n<div class=\"text-justify\"><\/div>\n<\/div>\n<\/div>\n<p>[\/et_pb_accordion_item][\/et_pb_accordion][\/et_pb_column][\/et_pb_row][\/et_pb_section][et_pb_section fb_built=&#8221;1&#8243; _builder_version=&#8221;4.23.4&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][\/et_pb_section]<\/p>\n","protected":false},"excerpt":{"rendered":"<p>Keynote Speakers<\/p>\n","protected":false},"author":1,"featured_media":0,"parent":0,"menu_order":0,"comment_status":"closed","ping_status":"closed","template":"","meta":{"_et_pb_use_builder":"on","_et_pb_old_content":"<!-- wp:paragraph -->\n<p>This is an example page. It's different from a blog post because it will stay in one place and will show up in your site navigation (in most themes). Most people start with an About page that introduces them to potential site visitors. It might say something like this:<\/p>\n<!-- \/wp:paragraph -->\n\n<!-- wp:quote -->\n<blockquote class=\"wp-block-quote\"><!-- wp:paragraph -->\n<p>Hi there! 
I'm a bike messenger by day, aspiring actor by night, and this is my website. I live in Los Angeles, have a great dog named Jack, and I like pi\u00f1a coladas. (And gettin' caught in the rain.)<\/p>\n<!-- \/wp:paragraph --><\/blockquote>\n<!-- \/wp:quote -->\n\n<!-- wp:paragraph -->\n<p>...or something like this:<\/p>\n<!-- \/wp:paragraph -->\n\n<!-- wp:quote -->\n<blockquote class=\"wp-block-quote\"><!-- wp:paragraph -->\n<p>The XYZ Doohickey Company was founded in 1971, and has been providing quality doohickeys to the public ever since. Located in Gotham City, XYZ employs over 2,000 people and does all kinds of awesome things for the Gotham community.<\/p>\n<!-- \/wp:paragraph --><\/blockquote>\n<!-- \/wp:quote -->\n\n<!-- wp:paragraph -->\n<p>As a new WordPress user, you should go to <a href=\"https:\/\/www.aimlsystems.org\/2023\/wp-admin\/\">your dashboard<\/a> to delete this page and create new pages for your content. Have fun!<\/p>\n<!-- \/wp:paragraph -->","_et_gb_content_width":"","footnotes":""},"class_list":["post-173","page","type-page","status-publish","hentry"],"_links":{"self":[{"href":"https:\/\/www.aimlsystems.org\/2024\/wp-json\/wp\/v2\/pages\/173","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/www.aimlsystems.org\/2024\/wp-json\/wp\/v2\/pages"}],"about":[{"href":"https:\/\/www.aimlsystems.org\/2024\/wp-json\/wp\/v2\/types\/page"}],"author":[{"embeddable":true,"href":"https:\/\/www.aimlsystems.org\/2024\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/www.aimlsystems.org\/2024\/wp-json\/wp\/v2\/comments?post=173"}],"version-history":[{"count":59,"href":"https:\/\/www.aimlsystems.org\/2024\/wp-json\/wp\/v2\/pages\/173\/revisions"}],"predecessor-version":[{"id":5906,"href":"https:\/\/www.aimlsystems.org\/2024\/wp-json\/wp\/v2\/pages\/173\/revisions\/5906"}],"wp:attachment":[{"href":"https:\/\/www.aimlsystems.org\/2024\/wp-json\/wp\/v2\/media?parent=173"}],"curies":[{"name":"wp","href":"https:\/\/api.w.o
rg\/{rel}","templated":true}]}}