-
Notifications
You must be signed in to change notification settings - Fork 0
/
index.html
294 lines (253 loc) · 16.4 KB
/
index.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
<html>
<head>
<title>GLAMOR Lab</title>
<link rel="icon" type="image/x-icon" href="./thumbnails/favicon.png">
<link href="https://cdn.jsdelivr.net/npm/[email protected]/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-KyZXEAg3QhqLMpG8r+8fhAXLRk2vvoC2f3B09zVXn8CA5QIVfZOJ3BCsw2P0p/We" crossorigin="anonymous">
<link href='https://fonts.googleapis.com/css?family=Lato:400,700' rel='stylesheet' type='text/css'>
<link rel="stylesheet" href="./main.css">
<link href='https://fonts.googleapis.com/css?family=Nunito' rel='stylesheet'>
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.bundle.min.js" integrity="sha384-MrcW6ZMFYlzcLA8Nl+NtUVF0sA7MsXsP1UyJoMp4YLEuNSfAP+JcXn/tWtIaxVXM" crossorigin="anonymous"></script>
<script src="people/phd.js"></script>
<base href="/">
<!-- General CSS for this page -->
<style>
@media screen and (max-width: 480px) {
#news > * {
padding: 0 0 0 0;
}
#news {
margin: 0 0 0 0.5em;
}
}
#banner {
height: 400px;
}
@media screen and (max-width: 480px) {
#banner {
height: 200px
}
}
</style>
<!-- Defines .bib block -->
<style>
.bib {
font-family: 'Roboto Mono', Roboto, 'Lucida Sans Typewriter', 'Lucida Typewriter', monospace;
font-size: 12px;
padding: 8px 8px 8px 8px;
border-style: ridge;
background-color: aliceblue;
}
</style>
<!-- Defines scripts -->
<script>
// Human-readable captions for the five "World Scope" research areas; index 0
// is an empty placeholder so that indices line up with scope numbers 1-5.
// NOTE(review): currently only referenced from commented-out rendering code
// inside populateStudents(); kept for when that markup is restored.
var world_scope_descs = ["", "World Scope 1 - Corpora and Representations", "World Scope 2 - The Written Word", "World Scope 3 - The World of Sights and Sounds", "World Scope 4 - Embodiment and Action", "World Scope 5 - The Social World"];
// Entry point wired to <body onload="onPageLoad()">: builds the dynamically
// generated student cards once the DOM is ready.
function onPageLoad() {
    populateStudents();
}
// Populates the "Group" section with student cards.
// PhD student data comes from the global `phd_students` array loaded by
// people/phd.js; undergraduate data is defined inline below. Each group is
// rendered in a fresh random order on every page load.
function populateStudents() {
    // Unbiased Fisher-Yates shuffle of a shallow copy. The previous
    // `sort(() => 0.5 - Math.random())` trick is not a correct shuffle: the
    // comparator is inconsistent, producing a biased, engine-dependent order.
    function shuffled(arr) {
        var out = arr.slice();
        for (var i = out.length - 1; i > 0; i--) {
            var j = Math.floor(Math.random() * (i + 1));
            var tmp = out[i];
            out[i] = out[j];
            out[j] = tmp;
        }
        return out;
    }

    // Builds one Bootstrap card for a PhD student record. Falls back to the
    // lab thumbnail when the student has not supplied a photo.
    function phdCard(s) {
        var photo = (s.photos.length > 0) ? 'glamor_photos/' + s.photos : 'thumbnails/ws1.png';
        var out = '<div class="card mb-3" style="max-width: 540px; min-height: 160px;"><div class="row g-0"><div class="image-cropper">';
        out += '<a href="' + s.websites + '" target="_blank"><img src="' + photo + '" class="img-fluid rounded-start" title="' + s.full_names + '"></a>';
        out += '</div></div><div class="row g-0"><div class="card-body">';
        out += '<h5 class="card-title">' + s.display_names + '</h5>';
        out += '<p class="card-text">' + s.roles + '<br/>' + s.note + '<br/>';
        out += '</p></div></div></div>';
        return out;
    }

    // --- PhD students: distributed round-robin across three columns ---
    var phdCols = [
        document.getElementById("phd_students_col1"),
        document.getElementById("phd_students_col2"),
        document.getElementById("phd_students_col3")
    ];
    if (phdCols.indexOf(null) === -1) {
        // Accumulate per-column markup first so each column gets one DOM
        // write instead of one `innerHTML +=` per card.
        var phdHtml = ["", "", ""];
        shuffled(phd_students).forEach(function (s, idx) {
            phdHtml[idx % 3] += phdCard(s);
        });
        for (var c = 0; c < phdCols.length; c++) {
            phdCols[c].innerHTML += phdHtml[c];
        }
    }

    // --- Undergraduate students: two columns, left column filling first ---
    // Guard: the undergraduate markup is currently commented out in the page
    // body, so these containers may be absent. Previously this code threw a
    // TypeError on the null returned by getElementById.
    var ugLeft = document.getElementById("ug_students_left");
    var ugRight = document.getElementById("ug_students_right");
    if (ugLeft === null || ugRight === null) {
        return;
    }
    var undergrads = [
        { full: "Minh (Evelyn) Vu", display: "Evelyn Vu", affiliation: "Cal Poly Pomona", photo: "", program: "Robotics REU", link: "https://www.cs.usc.edu/reu/" },
        { full: "Kush Bhagat", display: "Kush Bhagat", affiliation: "UT Dallas", photo: "", program: "USC SURE", link: "https://viterbiundergrad.usc.edu/research/sure/" },
        { full: "Chidera Iwudyke", display: "Chidera Iwudyke", affiliation: "UMBC", photo: "", program: "USC SURE", link: "https://viterbiundergrad.usc.edu/research/sure/" },
        { full: "Tanis Sarbatananda", display: "Tanis Sarbatananda", affiliation: "Los Angeles City College", photo: "", program: "USC/LACC ASSURE", link: "" }
    ];
    shuffled(undergrads).forEach(function (s, idx) {
        var photo = (s.photo.length > 0) ? 'glamor_photos/' + s.photo : 'thumbnails/ws1.png';
        var card = '<div class="card mb-3" style="max-width: 540px; min-height: 160px;"><div class="row g-0"><div class="col-md-4 my-0">';
        card += '<img src="' + photo + '" class="img-fluid rounded-start" title="' + s.full + '">';
        card += '</div><div class="col-md-8"><div class="card-body">';
        card += '<h5 class="card-title">' + s.display + '</h5>';
        card += '<p class="card-text">' + s.affiliation + '<br/><br/>';
        // Link the program name only when a program URL is available.
        if (s.link.length > 0) {
            card += '<a href="' + s.link + '">' + s.program + '</a></p>';
        } else {
            card += s.program + '</p>';
        }
        card += '</div></div></div></div>';
        // Alternate by parity so the left column fills first for odd counts.
        var side = (idx % 2 === 0) ? ugLeft : ugRight;
        side.innerHTML += card;
    });
}
</script>
</head>
<body onload="onPageLoad()">
<nav class="navbar navbar-expand-lg navbar-light" style="background-color: #990000;">
<div class="container-fluid">
<a class="navbar-brand" href="#">
<img class="glamor-thumbnail" src="thumbnails/glamor-logo-big.svg" title="GLAMOR lab logo">
</a>
<button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarSupportedContent" aria-controls="navbarSupportedContent" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbarSupportedContent">
<ul class="navbar-nav ms-auto mb-lg-0" >
<li class="nav-item"><a id="nav-link-custom" class="nav-link" href="people.html">People</a></li>
<li class="nav-item"><a id="nav-link-custom" class="nav-link" href="publications.html">Publications</a></li>
<li class="nav-item"><a id="nav-link-custom" class="nav-link" href="sponsors.html">Sponsors</a></li>
<li class="nav-item"><a id="nav-link-custom" class="nav-link" href="opportunities.html">Opportunities</a></li>
<li class="nav-item"><a id="nav-link-custom" class="nav-link" href="fun.html">Fun!</a></li>
</ul>
</div>
</div>
</nav>
<div id="container">
<br/>
<div class="row">
<p style="color:#990000;text-align:justify;font-size:28.75px">GLAMOR: G<span style="color:black">rounding </span> L<span style="color:black">anguage in </span>A<span style="color:black">ctions, </span>M<span style="color:black">ultimodal </span>O<span style="color:black">bservations and </span>R<span style="color:black">obots</span></p>
<br/>
<div class="col-md-5">
<img src="glamor_photos/lab-pic-04_2023.jpg" class="img col-md-12" style="border:2px solid #000">
</div>
<div class="col-md-7">
<p align="justify" style="font-size:20px">
Welcome to the GLAMOR Lab ✨ at the <span style="color:#990000"><b>University of Southern California</b></span>! We bring together <b>natural language processing and robotics</b> to connect language to the world (<b>RoboNLP</b>). Our lab is broadly interested in connecting language to agent perception and action, and lifelong learning through interaction.
</p>
</div>
</div>
<!--
<div class="row">
<a id="news"></a>
<div class="row">
<div class="col-md-12">
<table class="table-sm"><tr><td colspan="4"><h4>News</h4></td></tr>
<tr><td class="col-md-3"><b>Opportunity!</b><br/> REU</td><td class="col-md-5">Glass is to Break as Rubber is to Bend: Analogies for Natural Language Processing</td><td class="col-md-2"><a class="btn btn-light" style="padding:5px;" href="#analogies_urap">details</a></td><td class="col-md-2">Fall 2022-Spring 2023</td></tr>
<tr><td class="col-md-3"><b>Robotics Ed Week</b><br/> Viterbi K-12 STEM Center</td><td class="col-md-5">Talking to Robots: Challenges for Communicating with Robots in Text</td><td class="col-md-2"><a class="btn btn-light" style="padding:5px;" href="https://viterbik12.usc.edu/robotics-ed-week/">website</a></td><td class="col-md-2">April 8, 2022</td></tr>
</table>
</div>
</div>
</div>
-->
<br/>
<div class="row g-0">
<h4 style="text-align:left;color:#990000;margin-bottom:20px">Research Areas</h4>
<div class="row me-1 col-md-4" id="ws3">
<h5 style="text-align:left; padding-left: 0"><img class="tiny-thumbnail" src="thumbnails/ws3.png"> Language & Perception</h5>
<p align="justify" style="font-size:20px;vertical-align: top; padding-left: 0">
Language paired with sensory perception like vision, audio, and haptics. This scope includes audio-visual speech recognition, visual dialog, and recognizing heavy means increased physical weight.
</p>
</div>
<div class="row col-md-4 me-0" id="ws4">
<h5 style="text-align:left; padding-left: 0"><img class="tiny-thumbnail" src="thumbnails/ws4.png"> Embodiment & Action</h5>
<p align="justify" style="font-size:20px;vertical-align: top; padding-left: 0">
Language paired with or leading to world actions. This scope includes learning that <i>left</i> corresponds to a spatial orientation, and that <i>it's hot</i> is a pragmatic warning against physically touching an object.
</p>
</div>
<div class="row col-md-4 me-0" id="ws5">
<h5 style="text-align:left; padding-left: 0"><img class="tiny-thumbnail" src="thumbnails/ws5.png"> The Social World</h5>
<p align="justify" style="font-size:20px;vertical-align: top; padding-left: 0">
Language is what language does, and so language use in social contexts to cause changes in others' behavior and states of mind is the highest scope for grounded natural language use.<br/>
</p>
</div>
</div>
<br/>
<div class="row g-0">
<div class="row col-md-8" id="group">
<div class="col-md-12">
<h4 style="text-align:left;color:#990000;margin-bottom:20px;vertical-align:top">Group</h4>
</div>
<div class="col-md-4">
<div class="card mb-3" style="max-width: 540px; min-height: 160px;">
<div class="row g-0">
<div class="image-cropper" style="border-radius:50%">
<a href="https://jessethomason.com/" target="_blank"><img src="glamor_photos/jesse.jpeg" class="img-fluid rounded-start" title="Jesse Thomason"></a>
</div>
</div><div class="row g-0">
<div class="card-body">
<h5 class="card-title">Jesse Thomason</h5>
<p class="card-text">Principal Investigator</p>
</div>
</div>
</div>
</div><div class="col-md-8"></div>
<div class="col-md-4 px-0" id="phd_students_col1"></div>
<div class="col-md-4 px-0" id="phd_students_col2"></div>
<div class="col-md-4 px-0" id="phd_students_col3"></div>
<!--
<div class="col-md-12">
<h5 style="text-align:left">Undergraduate Students</h5>
</div>
<div class="col-md-6" id="ug_students_left"></div>
<div class="col-md-6" id="ug_students_right"></div>
-->
</div>
<div class="row col-md-4 g-0">
<div class="col-md-12">
<h4 style="text-align:left;color:#990000;margin-bottom:20px;padding-bottom:0px;vertical-align:top">News</h4>
</div>
<div class="6u" style="padding-top: 0px; margin-top: 10px; background-color: #f8f8f8;vertical-align:top " id="news">
<div class="content" style="overflow-y: scroll; padding-left: 20px; max-height: 1100px; padding-right: 10px; margin-top: 10px; text-align: left" id="div-news">
<ul style="list-style-type: none; padding: 0; margin-left: 0; margin-bottom: 10">
<li><b>08/2024</b> Tejas has been selected as an <a href="https://viterbischool.usc.edu/news/2024/12/uscamazon-center-announces-2024-2025-projects-and-fellows/">Amazon ML Fellow</a> by the USC-Amazon Center on Secure and Trusted Machine Learning! </li>
<li><b>05/2024</b> <a href="https://arxiv.org/abs/2402.08191">One paper</a> accepted to RSS 2024 and <a href="https://arxiv.org/abs/2402.15610">one paper</a> accepted to ACL Findings 2024!</li>
<li><b>03/2024</b> New <a href="https://arxiv.org/abs/2403.17246">pre-print</a> on multi-agent task planning!</li>
<li><b>03/2024</b> Three papers accepted to NAACL 2024!</li>
<li><b>02/2024</b> New pre-prints on <a href="https://arxiv.org/abs/2311.09612">visual document understanding</a> and <a href="https://arxiv.org/abs/2402.15610">selective prediction for vision-language reasoning</a>!</li>
<li><b>10/2023</b> <a href="https://arxiv.org/abs/2305.14901">Chain-of-Questions</a> has been accepted to EMNLP 2023!</li>
<li><b>05/2023</b> <a href="https://openreview.net/pdf?id=Hqb3t4Jqrk">Self-supervised 3D Representations</a> is a lightning talk at <a href="https://microsoft.github.io/robotics.pretraining.workshop.icra/">ICRA Pretraining for Robotics Workshop</a> </li>
<li><b>05/2023</b> <a href="https://sites.google.com/view/visarl">ViSaRL: Visual RL Guided By Human Saliency</a> is a Spotlight Talk at <a href="https://microsoft.github.io/robotics.pretraining.workshop.icra/">ICRA Pretraining for Robotics Workshop</a> </li>
<li><b>05/2023</b> <a href="https://arxiv.org/abs/2302.14030">One paper</a> accepted to Interspeech 2023! </li>
<li><b>05/2023</b> <a href="https://arxiv.org/abs/2304.02168">One paper</a> accepted to CoLLAs 2023! </li>
<li><b>05/2023</b> Leticia and Tejas received the Viterbi Graduate Mentorship and Viterbi Undergraduate Research Mentorship Awards, respectively!</li>
<li><b>04/2023</b> The lab participated in the <a href="https://viterbischool.usc.edu/news/2023/04/the-top-six-gifs-from-usc-robotics-open-house-2023/">USC Robotics Open House</a> for middle and high school students.</li>
<li><b>04/2023</b> <a href="https://arxiv.org/abs/2207.14525">Curriculum Learning for Data-Efficient Vision-Language Alignment</a> will be presented at the O-DRUM Workshop at CVPR 2023.</li>
<li><b>02/2023</b> <a href="http://arxiv.org/abs/2302.14030">New pre-print</a>! We study the challenge of training embodied agents that can follow spoken instructions.</li>
<li><b>02/2023</b> <a href="https://arxiv.org/abs/2210.03087">IVLN</a> has been accepted to CVPR 2023!</li>
<li><b>01/2023</b> Lee's <a href="https://arxiv.org/abs/2302.05759">paper</a> on sign language phonology has been accepted to EACL 2023!</li>
<li><b>01/2023</b> <a href="https://arxiv.org/abs/2209.11302">ProgPrompt</a> has been accepted to ICRA 2023!</li>
<li><b>12/2022</b> <a href="https://arxiv.org/abs/2210.15037">Transformer Adapters for Robot Learning</a> is a Spotlight Talk at the <a href="https://sites.google.com/view/corl2022-prl">Pretraining Robot Learning</a> workshop at CoRL 2022!</li>
<li><b>11/2022</b> One paper accepted to EMNLP 2022! <a href="https://arxiv.org/abs/2210.15037">Generalization Differences between End-to-End and Neuro-Symbolic Vision-Language Reasoning Systems</a></li>
<li><b>10/2022</b> New VLN benchmark release! <a href="https://arxiv.org/abs/2210.03087">IVLN</a> challenges agents to follow a language-guided tour of a home, enabling them to leverage persistent memory.</li>
<li><b>09/2022</b> New pre-print! <a href="https://arxiv.org/abs/2209.11302">ProgPrompt</a> adapts LLMs for situated robot task planning by prompting them with pythonic programs.</li>
<li><b>09/2022</b> <a href="https://arxiv.org/abs/2206.09059">CLiMB</a> 🧗♂️ was accepted to the <a href="https://neurips.cc/Conferences/2022">NeurIPS 2022</a> Datasets and Benchmarks Track! </li>
<li><b>06/2022</b> Pre-print alert! We introduce <a href="https://arxiv.org/abs/2206.09059">CLiMB</a> 🧗♂️, a new continual learning benchmark for vision-and-language tasks.</li>
<li><b>05/2022</b> REU research opportunity! Available Fall 2022-Spring 2023. <a href="opportunities.html#analogies_urap">More details</a>.</li>
<li><b>04/2022</b> Prof. Thomason talked to high school students at Viterbi K-12 STEM Center about robotics at <a href="https://viterbik12.usc.edu/robotics-ed-week/">Robotics Ed Week</a>.</li>
</ul>
</div>
</div>
</div>
</div>
</div>
</body>
</html>